Example usage for java.util.logging Logger getLogger

Introduction

On this page you can find example usage of java.util.logging Logger getLogger.

Prototype

@CallerSensitive
public static Logger getLogger(String name) 

Document

Find or create a logger for a named subsystem.
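
The examples below share a common pattern: obtain a logger named after the enclosing class and log caught exceptions at SEVERE level. A minimal sketch of that pattern (the class name and messages are illustrative, not taken from the examples below):

import java.util.logging.Level;
import java.util.logging.Logger;

public class GetLoggerSketch {

    // getLogger returns the logger already registered under this name, or
    // creates one; loggers are conventionally named after the owning class.
    private static final Logger LOGGER = Logger.getLogger(GetLoggerSketch.class.getName());

    public static void main(String[] args) {
        LOGGER.info("starting");
        try {
            throw new IllegalStateException("demo failure");
        } catch (IllegalStateException ex) {
            // the idiom used in the examples below: pass a null message and
            // let the logger format the exception's stack trace
            LOGGER.log(Level.SEVERE, null, ex);
        }
    }
}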

Usage

From source file:com.relecotech.bbb.api.APIGenerator.java

public static void main(String[] args) {

    try {
        XmlParser.runAPI(new APIGenerator().createAPI("create",
                "attendeePW=ap&meetingID=random-9736617&moderatorPW=mp&name=random-9736617&record=false&voiceBridge=79380&welcome=%3Cbr%3EWelcome+to+%3Cb%3E%25%25CONFNAME%25%25%3C%2Fb%3E%21"));
        //join moderator
        // XmlParser.runAPI(new APIGenerator().createAPI("join", "fullName=User+6361063&meetingID=random-9736617&password=mp"));
        //   XmlParser.runAPI(new APIGenerator().createAPI("join", "fullName=User+6361063&meetingID=random-9736617&password=ap"));
        // XmlParser.runAPI(new APIGenerator().createAPI("isMeetingRunning", "meetingID=random-9736617"));
        // XmlParser.runAPI(new APIGenerator().createAPI("getMeetingInfo", "meetingID=random-9736617&password=mp"));
        //  XmlParser.runAPI(new APIGenerator().createAPI("end", "meetingID=random-9736617&password=mp"));
        //XmlParser.runAPI(new APIGenerator().createAPI("getMeetings", ""));
        // XmlParser.runAPI(new APIGenerator().createAPI("getDefaultConfigXML", ""));
        // XmlParser.runAPI(new APIGenerator().createAPI("getRecordings", "meetingID=random-9736617"));
        // XmlParser.runAPI(new APIGenerator().createAPI("publishRecordings", "publish=false&recordID=random-9736617"));
        // XmlParser.runAPI(new APIGenerator().createAPI("deleteRecordings", "recordID=random-9736617"));
        //join from mobile (as moderator)
        //  XmlParser.runAPI(new APIGenerator().createAPI("join", "fullName=User+6361063&meetingID=random-9736617&password=mp"));
        //join from mobile (as attendee) 
        // XmlParser.runAPI(new APIGenerator().createAPI("join", "fullName=User+6361063&meetingID=random-9736617&password=ap"));
    } catch (TransformerException ex) {
        Logger.getLogger(APIGenerator.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(APIGenerator.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:edu.ehu.galan.lite.Example.java

public static void main(String[] args) {
    //initialize the ehcache system
    System.setProperty("net.sf.ehcache.enableShutdownHook", "true");
    if (CacheManager.getCacheManager("ehcacheLitet.xml") == null) {
        CacheManager.create("ehcacheLitet.xml");
    }
    cache = CacheManager.getInstance().getCache("LiteCache");
    //load the corpus to process
    Corpus corpus = new Corpus("en");
    //we specify the directory and the database mapping (Wikipedia in this case)
    corpus.loadCorpus("testCorpus", Document.SourceType.wikipedia);
    //will read the document using Illinois NLP utilities
    PlainTextDocumentReaderLBJEn parser = new PlainTextDocumentReaderLBJEn();
    AlgorithmRunner runner = new AlgorithmRunner();
    String resources = System.getProperty("user.dir") + "/resources/";
    //algorithm initialization
    CValueAlgortithm cvalue = new CValueAlgortithm();
    cvalue.addNewProcessingFilter(new AdjPrepNounFilter());
    TFIDFAlgorithm tf = new TFIDFAlgorithm(new CaseStemmer(CaseStemmer.CaseType.lowercase), "en");
    ShallowParsingGrammarAlgortithm sha = new ShallowParsingGrammarAlgortithm(
            System.getProperty("user.dir") + "/resources/lite/" + "grammars/Cg2EnGrammar.grammar", "cg3/");
    KPMinerAlgorithm kp = new KPMinerAlgorithm();
    RakeAlgorithm ex = new RakeAlgorithm();
    ex.loadStopWordsList("resources/lite/stopWordLists/RakeStopLists/SmartStopListEn");
    ex.loadPunctStopWord("resources/lite/stopWordLists/RakeStopLists/RakePunctDefaultStopList");
    //submit the algorithms to execute them in parallel
    runner.submitAlgorithm(kp);
    runner.submitAlgorithm(cvalue);
    runner.submitAlgorithm(tf);
    runner.submitAlgorithm(ex);
    runner.submitAlgorithm(sha);
    //load stop list
    List<String> standardStop = null;
    try {
        standardStop = Files.readAllLines(Paths.get(resources + "lite/stopWordLists/standardStopList"),
                StandardCharsets.UTF_8);

    } catch (IOException e1x) {
        Logger.getLogger(Example.class.getName()).log(Level.SEVERE, null, e1x);
    }
    //initialize Wikiminer helper (class that interacts with Wikiminer services)
    WikiminnerHelper helper = WikiminnerHelper.getInstance(resources);
    helper.setLanguage("en");
    //we may operate in local mode (using Wikiminer as an API instead of interacting via the REST API)
    // helper.setLocalMode(false,"/home/angel/nfs/wikiminer/configs/wikipedia");
    WikiMinerMap wikimapping = new WikiMinerMap(resources, helper);
    CValueWikiDisambiguator disambiguator = new CValueWikiDisambiguator(resources, helper);
    CValueWikiRelationship relate = new CValueWikiRelationship(resources, helper);
    WikipediaData data = new WikipediaData(resources, helper);
    helper.openConnection();
    //process all the documents in the corpus
    while (!corpus.getDocQueue().isEmpty()) {
        Document doc = corpus.getDocQueue().poll();
        doc.setSource(Document.SourceType.wikipedia);
        parser.readSource(doc.getPath());
        doc.setSentenceList(parser.getSentenceList());
        doc.setTokenList(parser.getTokenizedSentenceList());
        System.out.println(doc.getName());
        runner.runAlgorihms(doc, resources);
        doc.applyGlobalStopWordList(standardStop);
        doc.mapThreshold(1.9f, new String[] { "CValue" });
        doc.mapThreshold(0.00034554f, new String[] { "TFIDF" });
        doc.removeAndMixTerms();
        //map document
        wikimapping.mapCorpus(doc);
        disambiguator.disambiguateTopics(doc);
        //we may remove topics that were not disambiguated correctly
        DuplicateRemoval.disambiguationRemoval(doc);
        DuplicateRemoval.topicDuplicateRemoval(doc);
        //obtain the wiki links, labels, etc.
        data.processDocument(doc);
        //measure domain relatedness
        relate.relate(doc);
        //save the results
        Document.saveJsonToDir("", doc);
    }
    //close wikiminer connection and caches
    helper.closeConnection();
    cache.dispose();
    CacheManager.getInstance().shutdown();
    System.exit(0);
}

From source file:fr.ericlab.mabed.app.Main.java

public static void main(String[] args) throws IOException {
    Locale.setDefault(Locale.US);
    Configuration configuration = new Configuration();
    Corpus corpus = new Corpus(configuration);
    System.out.println("MABED: Mention-Anomaly-Based Event Detection");
    if (args.length == 0 || args[0].equals("-help")) {
        System.out.println("For more information on how to run MABED, see the README.txt file");
    } else {
        if (args[0].equals("-run")) {
            try {
                if (configuration.numberOfThreads > 1) {
                    System.out.println("Running the parallelized implementation with "
                            + configuration.numberOfThreads + " threads (this computer has "
                            + Runtime.getRuntime().availableProcessors() + " available threads)");
                } else {
                    System.out.println("Running the centralized implementation");
                }
                corpus.loadCorpus(configuration.numberOfThreads > 1);
                String output = "MABED: Mention-Anomaly-Based Event Detection\n" + corpus.output + "\n";
                System.out.println("-------------------------\n" + Util.getDate()
                        + " MABED is running\n-------------------------");
                output += "-------------------------\n" + Util.getDate()
                        + " MABED is running\n-------------------------\n";
                System.out.println(Util.getDate() + " Reading parameters:\n   - k = " + configuration.k
                        + ", p = " + configuration.p + ", theta = " + configuration.theta + ", sigma = "
                        + configuration.sigma);
                MABED mabed = new MABED();
                if (configuration.numberOfThreads > 1) {
                    output += mabed.applyParallelized(corpus, configuration);
                } else {
                    output += mabed.applyCentralized(corpus, configuration);
                }
                System.out.println(
                        "--------------------\n" + Util.getDate() + " MABED ended\n--------------------");
                output += "--------------------\n" + Util.getDate() + " MABED ended\n--------------------\n";
                File outputDir = new File("output");
                if (!outputDir.isDirectory()) {
                    outputDir.mkdir();
                }
                File textFile = new File("output/MABED.tex");
                FileUtils.writeStringToFile(textFile, mabed.events.toLatex(corpus), false);
                textFile = new File("output/MABED.log");
                FileUtils.writeStringToFile(textFile, output, false);
                mabed.events.printLatex(corpus);
            } catch (InterruptedException ex) {
                Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
            }
        } else {
            System.out.println("Unknown option '" + args[0]
                    + "'\nType 'java -jar MABED.jar -help' for more information on how to run MABED");
        }
    }
}

From source file:di.uniba.it.tee2.wiki.Wikidump2Text.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    try {
        CommandLine cmd = cmdParser.parse(options, args);
        if (cmd.hasOption("l") && cmd.hasOption("d") && cmd.hasOption("o")) {
            encoding = cmd.getOptionValue("e", "UTF-8");
            int counter = 0;
            try {
                BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(
                        new GZIPOutputStream(new FileOutputStream(cmd.getOptionValue("o"))), "UTF-8"));
                WikipediaDumpIterator it = new WikipediaDumpIterator(new File(cmd.getOptionValue("d")),
                        encoding);
                PageCleaner cleaner = PageCleanerWrapper.getInstance(cmd.getOptionValue("l"));
                while (it.hasNext()) {
                    WikiPage wikiPage = it.next();
                    ParsedPage parsedPage = wikiPage.getParsedPage();
                    if (parsedPage != null) {
                        String title = wikiPage.getTitle();
                        if (!title.matches(notValidTitle)) {
                            if (parsedPage.getText() != null) {
                                writer.append(cleaner.clean(parsedPage.getText()));
                                writer.newLine();
                                writer.newLine();
                                counter++;
                                if (counter % 10000 == 0) {
                                    System.out.println(counter);
                                    writer.flush();
                                }
                            }
                        }
                    }
                }
                writer.flush();
                writer.close();
            } catch (Exception ex) {
                Logger.getLogger(Wikidump2Text.class.getName()).log(Level.SEVERE, null, ex);
            }
            System.out.println("Indexed pages: " + counter);
        } else {
            HelpFormatter helpFormatter = new HelpFormatter();
            helpFormatter.printHelp("Wikipedia dump to text", options, true);
        }
    } catch (ParseException ex) {
        Logger.getLogger(Wikidump2Text.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:net.minecraftforge.fml.common.patcher.GenDiffSet.java

public static void main(String[] args) throws IOException {
    String sourceJar = args[0]; //Clean Vanilla jar minecraft.jar or minecraft_server.jar
    String targetDir = args[1]; //Directory containing obfuscated output classes, typically mcp/reobf/minecraft
    String deobfData = args[2]; //Path to FML's deobfuscation_data.lzma
    String outputDir = args[3]; //Path to place generated .binpatch
    String killTarget = args[4]; //"true" if we should destroy the target file if it generated a successful .binpatch

    LogManager.getLogger("GENDIFF").log(Level.INFO,
            String.format("Creating patches at %s for %s from %s", outputDir, sourceJar, targetDir));
    Delta delta = new Delta();
    FMLDeobfuscatingRemapper remapper = FMLDeobfuscatingRemapper.INSTANCE;
    remapper.setupLoadOnly(deobfData, false);
    JarFile sourceZip = new JarFile(sourceJar);
    boolean kill = killTarget.equalsIgnoreCase("true");

    File f = new File(outputDir);
    f.mkdirs();

    for (String name : remapper.getObfedClasses()) {
        //            Logger.getLogger("GENDIFF").info(String.format("Evaluating path for data :%s",name));
        String fileName = name;
        String jarName = name;
        if (RESERVED_NAMES.contains(name.toUpperCase(Locale.ENGLISH))) {
            fileName = "_" + name;
        }
        File targetFile = new File(targetDir, fileName.replace('/', File.separatorChar) + ".class");
        jarName = jarName + ".class";
        if (targetFile.exists()) {
            String sourceClassName = name.replace('/', '.');
            String targetClassName = remapper.map(name).replace('/', '.');
            JarEntry entry = sourceZip.getJarEntry(jarName);
            byte[] vanillaBytes = toByteArray(sourceZip, entry);
            byte[] patchedBytes = Files.toByteArray(targetFile);

            byte[] diff = delta.compute(vanillaBytes, patchedBytes);

            ByteArrayDataOutput diffOut = ByteStreams.newDataOutput(diff.length + 50);
            // Original name
            diffOut.writeUTF(name);
            // Source name
            diffOut.writeUTF(sourceClassName);
            // Target name
            diffOut.writeUTF(targetClassName);
            // exists at original
            diffOut.writeBoolean(entry != null);
            if (entry != null) {
                diffOut.writeInt(Hashing.adler32().hashBytes(vanillaBytes).asInt());
            }
            // length of patch
            diffOut.writeInt(diff.length);
            // patch
            diffOut.write(diff);

            File target = new File(outputDir, targetClassName + ".binpatch");
            target.getParentFile().mkdirs();
            Files.write(diffOut.toByteArray(), target);
            Logger.getLogger("GENDIFF").info(String.format("Wrote patch for %s (%s) at %s", name,
                    targetClassName, target.getAbsolutePath()));
            if (kill) {
                targetFile.delete();
                Logger.getLogger("GENDIFF").info(String.format("  Deleted target: %s", targetFile.toString()));
            }
        }
    }
    sourceZip.close();
}

From source file:gr.forth.ics.isl.preprocessfilter1.controller.Controller.java

public static void main(String[] args) throws XPathExpressionException, ParserConfigurationException,
        SAXException, IOException, PreprocessFilterException, org.apache.commons.cli.ParseException {
    PropertyReader prop = new PropertyReader();

    //The following block of code is executed if there are arguments from the command line
    if (args.length > 0) {

        try {

            //The values of the arguments are handled as Option instances
            Options options = new Options();
            CommandLineParser PARSER = new PosixParser();

            Option inputFile = new Option("inputFile", true, "input xml file");
            Option outputFile = new Option("outputFile", true, "output xml file");
            Option parentNode = new Option("parentNode", true, "parent node name");
            Option delimeter = new Option("delimeter", true, "delimiter string");
            Option newParentNode = new Option("newParentNode", true, "new parent node name");
            Option intermediateNodes = new Option("intermediateNodes", true, "intermediate nodes");
            Option intermediateNode = new Option("intermediateNode", true, "intermediate node");

            options.addOption(inputFile).addOption(outputFile).addOption(parentNode).addOption(newParentNode)
                    .addOption(intermediateNode).addOption(intermediateNodes).addOption(delimeter);

            CommandLine cli = PARSER.parse(options, args);

            String inputFileArg = cli.getOptionValue("inputFile");
            String outputFileArg = cli.getOptionValue("outputFile");
            String parentNodeArg = cli.getOptionValue("parentNode");
            String newParentNodeArg = cli.getOptionValue("newParentNode");
            String intermediateNodeArg = cli.getOptionValue("intermediateNode");
            String intermediateNodesArg = cli.getOptionValue("intermediateNodes");
            String delimeterArg = cli.getOptionValue("delimeter");

            PreprocessFilterUtilities process = new PreprocessFilterUtilities();

            //System.out.println("INPUT:"+inputFileArg);
            //System.out.println("OUTPUT:"+outputFileArg);
            //System.out.println("PARENT NODE:"+parentNodeArg);
            //System.out.println("NEW PARENT NODE:"+newParentNodeArg);
            //System.out.println("INTERMEDIATE NODE:"+intermediateNodeArg);
            //System.out.println("INTERMEDIATE NODES:"+intermediateNodesArg);
            //System.out.println("DELIMETER:"+delimeterArg);
            //The filter's code is executed with the command line arguments as parameters
            if (process.createOutputFile(inputFileArg, outputFileArg, parentNodeArg, newParentNodeArg,
                    intermediateNodeArg, intermediateNodesArg, delimeterArg)) {
                System.out.println("Successful PreProcessing!!!");
            }
        } catch (PreprocessFilterException ex) {
            Logger.getLogger(Controller.class.getName()).log(Level.SEVERE, null, ex);
            throw new PreprocessFilterException("PreProcess Filter Exception:", ex);
        }

    } //If there are no command line arguments then the .config file is used.
    else {

        try {

            String inputFilePathProp = prop.getProperty(inputFilePath);
            String outputFilePathProp = prop.getProperty(outputFilePath);
            String parentNodeProp = prop.getProperty(parentNode);
            String delimeterProp = prop.getProperty(delimeter);

            String newParentNodeProp = prop.getProperty(newParentNode);
            String intermediateNodesProp = prop.getProperty(intermediateNodes);
            String intermediateNodeProp = prop.getProperty(intermediateNode);

            PreprocessFilterUtilities process = new PreprocessFilterUtilities();

            //The filter's code is executed with the .config file's resources as parameters
            if (process.createOutputFile(inputFilePathProp, outputFilePathProp, parentNodeProp,
                    newParentNodeProp, intermediateNodeProp, intermediateNodesProp, delimeterProp)) {
                System.out.println("Successful PreProcessing!!!");
            }
        } catch (PreprocessFilterException ex) {
            Logger.getLogger(Controller.class.getName()).log(Level.SEVERE, null, ex);
            throw new PreprocessFilterException("PreProcess Filter Exception:", ex);
        }

    }
}

From source file:fresto.datastore.EventLogWriter.java

public static void main(String[] args) throws Exception {
    if (args.length != 2) {
        LOGGER.severe("Arguments needed: <frontHost> <frontPort>");
        System.exit(1);
    } else {
        frontHost = args[0];
        frontPort = args[1];
        LOGGER.info("Connecting... " + frontHost + ":" + frontPort + " with SUB");
    }

    final ZMQ.Context context = ZMQ.context(1);

    final FrestoEventQueue frestoEventQueue = new FrestoEventQueue();

    final Thread queueMonitorThread = new Thread() {
        Logger _LOGGER = Logger.getLogger("queueMonitorThread");

        @Override
        public void run() {
            while (work) {
                try {
                    _LOGGER.info("frestoEventQueue size = " + frestoEventQueue.size());
                    Thread.sleep(1000);
                } catch (InterruptedException ie) {
                }

            }
        }
    };

    final Thread logWriteThread = new Thread() {
        Logger _LOGGER = Logger.getLogger("logWriteThread");

        @Override
        public void run() {
            //FrestoStopWatch _watch = new FrestoStopWatch();
            //FrestoStopWatch _durationWatch = new FrestoStopWatch();

            EventLogWriter eventLogWriter = new EventLogWriter();

            // Open database
            //eventLogWriter.openTitanGraph();

            ZMQ.Socket receiver = null;
            //if("pull".equalsIgnoreCase(subOrPull)) {
            //   receiver = context.socket(ZMQ.PULL);
            //   receiver.connect("tcp://" + frontHost + ":" + frontPort);
            //} else if("sub".equalsIgnoreCase(subOrPull)) {
            receiver = context.socket(ZMQ.SUB);
            receiver.connect("tcp://" + frontHost + ":" + frontPort);
            receiver.subscribe("".getBytes());
            //} else {
            //   LOGGER.severe(subOrPull + " is not supported.");
            //   System.exit(1);
            //}

            //Consume socket data
            frestoEventQueue.setPullerSocket(receiver);
            frestoEventQueue.start();

            int waitingEventCount = 0;
            //int count = 0;
            //long elapsedTime = 0;
            //long duration = 0;

            //_durationWatch.start();

            while (work) {

                // Wait until there is at least one event in the queue
                if (frestoEventQueue.isEmpty()) {
                    try {
                        //_LOGGER.info("FrestoEventQueue is empty. Waiting " + SLEEP_TIME + "ms...");
                        Thread.sleep(SLEEP_TIME);
                        continue;
                    } catch (InterruptedException ie) {
                    }
                }

                waitingEventCount = frestoEventQueue.size();

                for (int i = 0; i < waitingEventCount; i++) {
                    //_watch.start();
                    //count++;

                    FrestoEvent frestoEvent = frestoEventQueue.poll();

                    try {
                        eventLogWriter.writeEventData(frestoEvent.topic, frestoEvent.eventBytes);
                    } catch (Exception e) {
                        e.printStackTrace();
                    } finally {
                        //
                    }

                    //elapsedTime += _watch.stop();
                    //duration += _durationWatch.stop();

                    //if(count == maxCommitCount) {
                    //   eventLogWriter.commitGraph();
                    //   _LOGGER.info(count + " events processed for " + elapsedTime + " ms. (total time " + duration + " ms.) Remaining events " + frestoEventQueue.size());
                    //   
                    //   count = 0;
                    //   elapsedTime = 0;
                    //   duration = 0;
                    //   // Stop FOR clause
                    //}
                }

                //eventLogWriter.commitGraph();

                _LOGGER.info("Remaining events " + frestoEventQueue.size());

                //count = 0;
                //elapsedTime = 0;
                //duration = 0;
            }
            _LOGGER.info("Shutting down...");

            //if(g.isOpen()) {
            //   g.commit();
            //   g.shutdown();
            //}

            receiver.close();
            context.term();

            _LOGGER.info("Good bye.");
        }
    };

    Runtime.getRuntime().addShutdownHook(new Thread() {
        @Override
        public void run() {
            System.out.println(" Interrupt received, killing logger");
            // To break out of the while loop
            frestoEventQueue.stopWork();
            work = false;

            try {
                logWriteThread.join();
                frestoEventQueue.join();
                //queueMonitorThread.join();

            } catch (InterruptedException e) {
                //
            }
        }
    });

    //queueMonitorThread.start();
    logWriteThread.start();

}

From source file:di.uniba.it.tri.aan.AAN2file.java

/**
 * Convert the ACL dataset into a single file for each paper, with its year
 * reference. Arguments: aan_dir output_dir
 *
 * @param args the command line arguments
 */
public static void main(String[] args) {
    try {
        if (args.length > 1) {
            AAN2file ann = new AAN2file();
            ann.build(args[0], args[1]);
        } else {
            throw new Exception("Illegal arguments");
        }
    } catch (Exception ex) {
        Logger.getLogger(AAN2file.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:gr.demokritos.iit.demos.Demo.java

public static void main(String[] args) {
    try {
        Options options = new Options();
        options.addOption("h", HELP, false, "show help.");
        options.addOption("i", INPUT, true,
                "The file containing JSON " + " representations of tweets or SAG posts - 1 per line"
                        + " default file looked for is " + DEFAULT_INFILE);
        options.addOption("o", OUTPUT, true,
                "Where to write the output " + " default file looked for is " + DEFAULT_OUTFILE);
        options.addOption("p", PROCESS, true, "Type of processing to do "
                + " ner for Named Entity Recognition re for Relation Extraction" + " default is NER");
        options.addOption("s", SAG, false,
                "Whether to process as SAG posts" + " default is off - if passed means process as SAG posts");

        CommandLineParser parser = new BasicParser();
        CommandLine cmd = parser.parse(options, args);
        // DEFAULTS
        String filename = DEFAULT_INFILE;
        String outfilename = DEFAULT_OUTFILE;
        String process = NER;
        boolean isSAG = false;

        if (cmd.hasOption(HELP)) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("NER + RE extraction module", options);
            System.exit(0);
        }
        if (cmd.hasOption(INPUT)) {
            filename = cmd.getOptionValue(INPUT);
        }
        if (cmd.hasOption(OUTPUT)) {
            outfilename = cmd.getOptionValue(OUTPUT);
        }
        if (cmd.hasOption(SAG)) {
            isSAG = true;
        }
        if (cmd.hasOption(PROCESS)) {
            process = cmd.getOptionValue(PROCESS);
        }
        System.out.println();
        System.out.println("Reading from file: " + filename);
        System.out.println("Process type: " + process);
        System.out.println("Processing SAG: " + isSAG);
        System.out.println("Writing to file: " + outfilename);
        System.out.println();

        List<String> jsoni = new ArrayList();
        Scanner in = new Scanner(new FileReader(filename));
        while (in.hasNextLine()) {
            String json = in.nextLine();
            jsoni.add(json);
        }
        PrintWriter writer = new PrintWriter(outfilename, "UTF-8");
        System.out.println("Read " + jsoni.size() + " lines from " + filename);
        if (process.equalsIgnoreCase(RE)) {
            System.out.println("Running Relation Extraction");
            System.out.println();
            String json = API.RE(jsoni, isSAG);
            System.out.println(json);
            writer.print(json);
        } else {
            System.out.println("Running Named Entity Recognition");
            System.out.println();
            jsoni = API.NER(jsoni, isSAG);
            /*
            for(String json: jsoni){
               NamedEntityList nel = NamedEntityList.fromJSON(json);
               nel.prettyPrint();
            }
            */
            for (String json : jsoni) {
                System.out.println(json);
                writer.print(json);
            }
        }
        writer.close();
    } catch (ParseException | UnsupportedEncodingException | FileNotFoundException ex) {
        Logger.getLogger(Demo.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:com.guye.baffle.obfuscate.Main.java

public static void main(String[] args) throws IOException, BaffleException {
    Options opt = new Options();

    opt.addOption("c", "config", true, "config file path,keep or mapping");

    opt.addOption("o", "output", true, "output mapping writer file");

    opt.addOption("v", "verbose", false, "explain what is being done.");

    opt.addOption("h", "help", false, "print help for the command.");

    opt.getOption("c").setArgName("file list");

    opt.getOption("o").setArgName("file path");

    String formatstr = "baffle [-c/--config filepaths list ][-o/--output filepath][-h/--help] ApkFile TargetApkFile";

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cl = null;
    try {
        // parse the command line against the defined options
        cl = parser.parse(opt, args);

    } catch (ParseException e) {
        formatter.printHelp(formatstr, opt); // print usage if parsing fails
        return;
    }

    if (cl == null || cl.getArgs() == null || cl.getArgs().length == 0) {
        formatter.printHelp(formatstr, opt);
        return;
    }

    // if -h/--help was passed, print help and exit
    if (cl.hasOption("h")) {
        HelpFormatter hf = new HelpFormatter();
        hf.printHelp(formatstr, "", opt, "");
        return;
    }

    // get the remaining positional arguments (apk file and target apk file)
    String[] str = cl.getArgs();
    if (str == null || str.length != 2) {
        HelpFormatter hf = new HelpFormatter();
        hf.printHelp("apk file or target apk file not specified", opt);
        return;
    }

    if (str[1].equals(str[0])) {
        HelpFormatter hf = new HelpFormatter();
        hf.printHelp("apk file can not be rewritten, please specify a new target file", opt);
        return;
    }
    File apkFile = new File(str[0]);
    if (!apkFile.exists()) {
        HelpFormatter hf = new HelpFormatter();
        hf.printHelp("apk file not exists", opt);
        return;
    }

    File[] configs = null;
    if (cl.hasOption("c")) {
        String cfg = cl.getOptionValue("c");
        String[] fs = cfg.split(",");
        int len = fs.length;
        configs = new File[fs.length];
        for (int i = 0; i < len; i++) {
            configs[i] = new File(fs[i]);
            if (!configs[i].exists()) {
                HelpFormatter hf = new HelpFormatter();
                hf.printHelp("config file " + fs[i] + " not exists", opt);
                return;
            }
        }
    }

    File mappingfile = null;
    if (cl.hasOption("o")) {
        String mfile = cl.getOptionValue("o");
        mappingfile = new File(mfile);

        if (mappingfile.getParentFile() != null) {
            mappingfile.getParentFile().mkdirs();
        }

    }

    if (cl.hasOption('v')) {
        Logger.getLogger(Obfuscater.LOG_NAME).setLevel(Level.CONFIG);
    } else {
        Logger.getLogger(Obfuscater.LOG_NAME).setLevel(Level.OFF);
    }

    Logger.getLogger(Obfuscater.LOG_NAME).addHandler(new ConsoleHandler());

    Obfuscater obfuscater = new Obfuscater(configs, mappingfile, apkFile, str[1]);

    obfuscater.obfuscate();
}