Example usage for java.io BufferedWriter newLine

List of usage examples for java.io BufferedWriter newLine

Introduction

On this page you can find example usage of java.io.BufferedWriter.newLine().

Prototype

public void newLine() throws IOException 

Document

Writes a line separator. The line separator string is defined by the system property line.separator, and is not necessarily a single newline ('\n') character.
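
Before the full examples below, here is a minimal, self-contained sketch of the typical pattern (the file name example.txt is only a placeholder): newLine() emits the platform's line separator, so output stays portable across operating systems, and try-with-resources flushes and closes the writer automatically.

import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

public class NewLineExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources flushes and closes the writer automatically
        try (BufferedWriter writer = Files.newBufferedWriter(Paths.get("example.txt"),
                StandardCharsets.UTF_8)) {
            writer.write("first line");
            writer.newLine(); // platform-dependent separator, not a hard-coded '\n'
            writer.write("second line");
            writer.newLine();
        }
    }
}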

Usage

From source file:org.apache.druid.query.aggregation.datasketches.quantiles.GenerateTestData.java

public static void main(String[] args) throws Exception {
    Path buildPath = FileSystems.getDefault().getPath("doubles_build_data.tsv");
    Path sketchPath = FileSystems.getDefault().getPath("doubles_sketch_data.tsv");
    BufferedWriter buildData = Files.newBufferedWriter(buildPath, StandardCharsets.UTF_8);
    BufferedWriter sketchData = Files.newBufferedWriter(sketchPath, StandardCharsets.UTF_8);
    Random rand = ThreadLocalRandom.current();
    int sequenceNumber = 0;
    for (int i = 0; i < 20; i++) {
        int product = rand.nextInt(10);
        UpdateDoublesSketch sketch = UpdateDoublesSketch.builder().build();
        for (int j = 0; j < 20; j++) {
            double value = rand.nextDouble();
            buildData.write("2016010101");
            buildData.write('\t');
            buildData.write(Integer.toString(sequenceNumber)); // dimension with unique numbers for ingesting raw data
            buildData.write('\t');
            buildData.write(Integer.toString(product)); // product dimension
            buildData.write('\t');
            buildData.write(Double.toString(value));
            buildData.newLine();
            sketch.update(value);
            sequenceNumber++;
        }
        sketchData.write("2016010101");
        sketchData.write('\t');
        sketchData.write(Integer.toString(product)); // product dimension
        sketchData.write('\t');
        sketchData.write(Base64.encodeBase64String(sketch.toByteArray(true)));
        sketchData.newLine();
    }
    buildData.close();
    sketchData.close();
}

From source file:di.uniba.it.tee2.wiki.Wikidump2Text.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) {
    try {
        CommandLine cmd = cmdParser.parse(options, args);
        if (cmd.hasOption("l") && cmd.hasOption("d") && cmd.hasOption("o")) {
            encoding = cmd.getOptionValue("e", "UTF-8");
            int counter = 0;
            try {
                BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(
                        new GZIPOutputStream(new FileOutputStream(cmd.getOptionValue("o"))), "UTF-8"));
                WikipediaDumpIterator it = new WikipediaDumpIterator(new File(cmd.getOptionValue("d")),
                        encoding);
                PageCleaner cleaner = PageCleanerWrapper.getInstance(cmd.getOptionValue("l"));
                while (it.hasNext()) {
                    WikiPage wikiPage = it.next();
                    ParsedPage parsedPage = wikiPage.getParsedPage();
                    if (parsedPage != null) {
                        String title = wikiPage.getTitle();
                        if (!title.matches(notValidTitle)) {
                            if (parsedPage.getText() != null) {
                                writer.append(cleaner.clean(parsedPage.getText()));
                                writer.newLine();
                                writer.newLine();
                                counter++;
                                if (counter % 10000 == 0) {
                                    System.out.println(counter);
                                    writer.flush();
                                }
                            }
                        }
                    }
                }
                writer.flush();
                writer.close();
            } catch (Exception ex) {
                Logger.getLogger(Wikidump2Text.class.getName()).log(Level.SEVERE, null, ex);
            }
            System.out.println("Indexed pages: " + counter);
        } else {
            HelpFormatter helpFormatter = new HelpFormatter();
            helpFormatter.printHelp("Wikipedia dump to text", options, true);
        }
    } catch (ParseException ex) {
        Logger.getLogger(Wikidump2Text.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:edu.cmu.lti.oaqa.knn4qa.apps.QueryGenNMSLIB.java

public static void main(String[] args) {
    Options options = new Options();

    options.addOption(CommonParams.QUERY_FILE_PARAM, null, true, CommonParams.QUERY_FILE_DESC);
    options.addOption(CommonParams.MEMINDEX_PARAM, null, true, CommonParams.MEMINDEX_DESC);
    options.addOption(CommonParams.KNN_QUERIES_PARAM, null, true, CommonParams.KNN_QUERIES_DESC);
    options.addOption(CommonParams.NMSLIB_FIELDS_PARAM, null, true, CommonParams.NMSLIB_FIELDS_DESC);
    options.addOption(CommonParams.MAX_NUM_QUERY_PARAM, null, true, CommonParams.MAX_NUM_QUERY_DESC);
    options.addOption(CommonParams.SEL_PROB_PARAM, null, true, CommonParams.SEL_PROB_DESC);

    CommandLineParser parser = new org.apache.commons.cli.GnuParser();

    BufferedWriter knnQueries = null;

    int maxNumQuery = Integer.MAX_VALUE;

    Float selProb = null;

    try {
        CommandLine cmd = parser.parse(options, args);
        String queryFile = null;

        if (cmd.hasOption(CommonParams.QUERY_FILE_PARAM)) {
            queryFile = cmd.getOptionValue(CommonParams.QUERY_FILE_PARAM);
        } else {
            Usage("Specify 'query file'", options);
        }

        String knnQueriesFile = cmd.getOptionValue(CommonParams.KNN_QUERIES_PARAM);

        if (null == knnQueriesFile)
            Usage("Specify '" + CommonParams.KNN_QUERIES_DESC + "'", options);

        String tmpn = cmd.getOptionValue(CommonParams.MAX_NUM_QUERY_PARAM);
        if (tmpn != null) {
            try {
                maxNumQuery = Integer.parseInt(tmpn);
            } catch (NumberFormatException e) {
                Usage("Maximum number of queries isn't integer: '" + tmpn + "'", options);
            }
        }

        String tmps = cmd.getOptionValue(CommonParams.NMSLIB_FIELDS_PARAM);
        if (null == tmps)
            Usage("Specify '" + CommonParams.NMSLIB_FIELDS_DESC + "'", options);
        String[] nmslibFieldList = tmps.split(",");

        knnQueries = new BufferedWriter(new FileWriter(knnQueriesFile));
        knnQueries.write("isQueryFile=1");
        knnQueries.newLine();
        knnQueries.newLine();

        String memIndexPref = cmd.getOptionValue(CommonParams.MEMINDEX_PARAM);

        if (null == memIndexPref) {
            Usage("Specify '" + CommonParams.MEMINDEX_DESC + "'", options);
        }

        String tmpf = cmd.getOptionValue(CommonParams.SEL_PROB_PARAM);

        if (tmpf != null) {
            try {
                selProb = Float.parseFloat(tmpf);
            } catch (NumberFormatException e) {
                Usage("A selection probability isn't a number in the range (0,1)'" + tmpf + "'", options);
            }
            if (selProb < Float.MIN_NORMAL || selProb + Float.MIN_NORMAL >= 1)
                Usage("A selection probability isn't a number in the range (0,1)'" + tmpf + "'", options);
        }

        BufferedReader inpText = new BufferedReader(
                new InputStreamReader(CompressUtils.createInputStream(queryFile)));

        String docText = XmlHelper.readNextXMLIndexEntry(inpText);

        NmslibQueryGenerator queryGen = new NmslibQueryGenerator(nmslibFieldList, memIndexPref);

        Random rnd = new Random();

        for (int docNum = 1; docNum <= maxNumQuery
                && docText != null; ++docNum, docText = XmlHelper.readNextXMLIndexEntry(inpText)) {
            if (selProb != null) {
                if (rnd.nextFloat() > selProb)
                    continue;
            }

            Map<String, String> docFields = null;

            try {
                docFields = XmlHelper.parseXMLIndexEntry(docText);

                String queryObjStr = queryGen.getStrObjForKNNService(docFields);

                knnQueries.append(queryObjStr);
                knnQueries.newLine();
            } catch (SAXException e) {
                System.err.println("Parsing error, offending DOC:" + NL + docText + " doc # " + docNum);
                throw new Exception("Parsing error.");
            }
        }

        knnQueries.close();
    } catch (ParseException e) {
        Usage("Cannot parse arguments", options);
        if (null != knnQueries)
            try {
                knnQueries.close();
            } catch (IOException e1) {
                e1.printStackTrace();
            }
    } catch (Exception e) {
        System.err.println("Terminating due to an exception: " + e);
        try {
            if (knnQueries != null)
                knnQueries.close();
        } catch (IOException e1) {
            e1.printStackTrace();
        }
        System.exit(1);
    }

    System.out.println("Terminated successfully!");
}

From source file:edu.cmu.lti.oaqa.knn4qa.apps.BuildRetrofitLexicons.java

public static void main(String[] args) {
    Options options = new Options();

    options.addOption(CommonParams.GIZA_ROOT_DIR_PARAM, null, true, CommonParams.GIZA_ROOT_DIR_DESC);
    options.addOption(CommonParams.GIZA_ITER_QTY_PARAM, null, true, CommonParams.GIZA_ITER_QTY_DESC);
    options.addOption(CommonParams.MEMINDEX_PARAM, null, true, CommonParams.MEMINDEX_DESC);
    options.addOption(OUT_FILE_PARAM, null, true, OUT_FILE_DESC);
    options.addOption(MIN_PROB_PARAM, null, true, MIN_PROB_DESC);
    options.addOption(FORMAT_PARAM, null, true, FORMAT_DESC);

    CommandLineParser parser = new org.apache.commons.cli.GnuParser();

    try {
        CommandLine cmd = parser.parse(options, args);
        String gizaRootDir = cmd.getOptionValue(CommonParams.GIZA_ROOT_DIR_PARAM);
        int gizaIterQty = -1;

        if (cmd.hasOption(CommonParams.GIZA_ITER_QTY_PARAM)) {
            gizaIterQty = Integer.parseInt(cmd.getOptionValue(CommonParams.GIZA_ITER_QTY_PARAM));
        } else {
            Usage("Specify: " + CommonParams.GIZA_ITER_QTY_PARAM, options);
        }
        String outFileName = cmd.getOptionValue(OUT_FILE_PARAM);
        if (null == outFileName) {
            Usage("Specify: " + OUT_FILE_PARAM, options);
        }

        String indexDir = cmd.getOptionValue(CommonParams.MEMINDEX_PARAM);

        if (null == indexDir) {
            Usage("Specify: " + CommonParams.MEMINDEX_DESC, options);
        }

        FormatType outType = FormatType.kOrig;

        String outTypeStr = cmd.getOptionValue(FORMAT_PARAM);

        if (null != outTypeStr) {
            if (outTypeStr.equals(ORIG_TYPE)) {
                outType = FormatType.kOrig;
            } else if (outTypeStr.equals(WEIGHTED_TYPE)) {
                outType = FormatType.kWeighted;
            } else if (outTypeStr.equals(UNWEIGHTED_TYPE)) {
                outType = FormatType.kUnweighted;
            } else {
                Usage("Unknown format type: " + outTypeStr, options);
            }
        }

        float minProb = 0;

        if (cmd.hasOption(MIN_PROB_PARAM)) {
            minProb = Float.parseFloat(cmd.getOptionValue(MIN_PROB_PARAM));
        } else {
            Usage("Specify: " + MIN_PROB_PARAM, options);
        }

        System.out.println(String.format(
                "Saving lexicon to '%s' (output format '%s'), keep only entries with translation probability >= %f",
                outFileName, outType.toString(), minProb));

        // We use unlemmatized text here, because the lemmatized dictionary is mostly a subset of the unlemmatized one.
        InMemForwardIndex textIndex = new InMemForwardIndex(FeatureExtractor.indexFileName(indexDir,
                FeatureExtractor.mFieldNames[FeatureExtractor.TEXT_UNLEMM_FIELD_ID]));
        InMemForwardIndexFilterAndRecoder filterAndRecoder = new InMemForwardIndexFilterAndRecoder(textIndex);

        String prefix = gizaRootDir + "/" + FeatureExtractor.mFieldNames[FeatureExtractor.TEXT_UNLEMM_FIELD_ID]
                + "/";
        GizaVocabularyReader answVoc = new GizaVocabularyReader(prefix + "source.vcb", filterAndRecoder);
        GizaVocabularyReader questVoc = new GizaVocabularyReader(prefix + "target.vcb", filterAndRecoder);

        GizaTranTableReaderAndRecoder gizaTable = new GizaTranTableReaderAndRecoder(false, // we don't need to flip the table for the purpose 
                prefix + "/output.t1." + gizaIterQty, filterAndRecoder, answVoc, questVoc,
                (float) FeatureExtractor.DEFAULT_PROB_SELF_TRAN, minProb);
        BufferedWriter outFile = new BufferedWriter(new FileWriter(outFileName));

        for (int srcWordId = 0; srcWordId <= textIndex.getMaxWordId(); ++srcWordId) {
            GizaOneWordTranRecs tranRecs = gizaTable.getTranProbs(srcWordId);

            if (null != tranRecs) {
                String wordSrc = textIndex.getWord(srcWordId);
                StringBuffer sb = new StringBuffer();
                sb.append(wordSrc);

                for (int k = 0; k < tranRecs.mDstIds.length; ++k) {
                    float prob = tranRecs.mProbs[k];
                    if (prob >= minProb) {
                        int dstWordId = tranRecs.mDstIds[k];

                        if (dstWordId == srcWordId && outType != FormatType.kWeighted)
                            continue; // Don't duplicate the word, unless it's probability weighted

                        sb.append(' ');
                        String dstWord = textIndex.getWord(dstWordId);
                        if (null == dstWord) {
                            throw new Exception(
                                    "Bug or inconsistent data: Couldn't retriev a word for wordId = "
                                            + dstWordId);
                        }
                        if (dstWord.indexOf(':') >= 0)
                            throw new Exception(
                                    "Illegal dictionary word '" + dstWord + "' b/c it contains ':'");
                        sb.append(dstWord);
                        if (outType != FormatType.kOrig) {
                            sb.append(':');
                            sb.append(outType == FormatType.kWeighted ? prob : 1);
                        }
                    }
                }

                outFile.write(sb.toString());
                outFile.newLine();
            }
        }

        outFile.close();
    } catch (ParseException e) {
        e.printStackTrace();
        Usage("Cannot parse arguments", options);
    } catch (Exception e) {
        e.printStackTrace();
        System.err.println("Terminating due to an exception: " + e);
        System.exit(1);
    }

    System.out.println("Terminated successfully!");

}

From source file:edu.cmu.lti.oaqa.knn4qa.apps.FilterTranTable.java

public static void main(String[] args) {
    Options options = new Options();

    options.addOption(INPUT_PARAM, null, true, INPUT_DESC);
    options.addOption(OUTPUT_PARAM, null, true, OUTPUT_DESC);
    options.addOption(CommonParams.MEM_FWD_INDEX_PARAM, null, true, CommonParams.MEM_FWD_INDEX_DESC);
    options.addOption(CommonParams.GIZA_ITER_QTY_PARAM, null, true, CommonParams.GIZA_ITER_QTY_PARAM);
    options.addOption(CommonParams.GIZA_ROOT_DIR_PARAM, null, true, CommonParams.GIZA_ROOT_DIR_PARAM);
    options.addOption(CommonParams.MIN_PROB_PARAM, null, true, CommonParams.MIN_PROB_DESC);
    options.addOption(CommonParams.MAX_WORD_QTY_PARAM, null, true, CommonParams.MAX_WORD_QTY_PARAM);

    CommandLineParser parser = new org.apache.commons.cli.GnuParser();

    try {
        CommandLine cmd = parser.parse(options, args);

        String outputFile = null;

        outputFile = cmd.getOptionValue(OUTPUT_PARAM);
        if (null == outputFile) {
            Usage("Specify 'A name of the output file'", options);
        }

        String gizaRootDir = cmd.getOptionValue(CommonParams.GIZA_ROOT_DIR_PARAM);
        if (null == gizaRootDir) {
            Usage("Specify '" + CommonParams.GIZA_ROOT_DIR_DESC + "'", options);
        }

        String gizaIterQty = cmd.getOptionValue(CommonParams.GIZA_ITER_QTY_PARAM);

        if (null == gizaIterQty) {
            Usage("Specify '" + CommonParams.GIZA_ITER_QTY_DESC + "'", options);
        }

        float minProb = 0;

        String tmpf = cmd.getOptionValue(CommonParams.MIN_PROB_PARAM);

        if (tmpf != null) {
            minProb = Float.parseFloat(tmpf);
        }

        int maxWordQty = Integer.MAX_VALUE;

        String tmpi = cmd.getOptionValue(CommonParams.MAX_WORD_QTY_PARAM);

        if (null != tmpi) {
            maxWordQty = Integer.parseInt(tmpi);
        }

        String memFwdIndxName = cmd.getOptionValue(CommonParams.MEM_FWD_INDEX_PARAM);
        if (null == memFwdIndxName) {
            Usage("Specify '" + CommonParams.MEM_FWD_INDEX_DESC + "'", options);
        }

        System.out.println("Filtering index: " + memFwdIndxName + " max # of frequent words: " + maxWordQty
                + " min. probability:" + minProb);

        VocabularyFilterAndRecoder filter = new FrequentIndexWordFilterAndRecoder(memFwdIndxName, maxWordQty);

        String srcVocFile = CompressUtils.findFileVariant(gizaRootDir + "/source.vcb");

        System.out.println("Source vocabulary file: " + srcVocFile);

        GizaVocabularyReader srcVoc = new GizaVocabularyReader(srcVocFile, filter);

        String dstVocFile = CompressUtils.findFileVariant(gizaRootDir + "/target.vcb");

        System.out.println("Target vocabulary file: " + dstVocFile);

        GizaVocabularyReader dstVoc = new GizaVocabularyReader(CompressUtils.findFileVariant(dstVocFile),
                filter);

        String inputFile = CompressUtils.findFileVariant(gizaRootDir + "/output.t1." + gizaIterQty);

        BufferedReader finp = new BufferedReader(
                new InputStreamReader(CompressUtils.createInputStream(inputFile)));

        BufferedWriter fout = new BufferedWriter(
                new OutputStreamWriter(CompressUtils.createOutputStream(outputFile)));

        try {
            String line;
            int prevSrcId = -1;
            int wordQty = 0;
            long addedQty = 0;
            long totalQty = 0;
            boolean isNotFiltered = false;

            for (totalQty = 0; (line = finp.readLine()) != null;) {
                ++totalQty;
                // Skip empty lines
                line = line.trim();
                if (line.isEmpty())
                    continue;

                GizaTranRec rec = new GizaTranRec(line);

                if (rec.mSrcId != prevSrcId) {
                    ++wordQty;
                }
                if (totalQty % REPORT_INTERVAL_QTY == 0) {
                    System.out.println(String.format(
                            "Processed %d lines (%d source word entries) from '%s', added %d lines", totalQty,
                            wordQty, inputFile, addedQty));
                }

                // isNotFiltered should be set after procOneWord
                if (rec.mSrcId != prevSrcId) {
                    if (rec.mSrcId == 0)
                        isNotFiltered = true;
                    else {
                        String wordSrc = srcVoc.getWord(rec.mSrcId);
                        isNotFiltered = filter == null || (wordSrc != null && filter.checkWord(wordSrc));
                    }
                }

                prevSrcId = rec.mSrcId;

                if (rec.mProb >= minProb && isNotFiltered) {
                    String wordDst = dstVoc.getWord(rec.mDstId);

                    if (filter == null || (wordDst != null && filter.checkWord(wordDst))) {
                        fout.write(rec.mSrcId + " " + rec.mDstId + " " + rec.mProb); // don't pass data through String.format as a format string
                        fout.newLine();
                        addedQty++;
                    }
                }
            }

            System.out.println(
                    String.format("Processed %d lines (%d source word entries) from '%s', added %d lines",
                            totalQty, wordQty, inputFile, addedQty));

        } finally {
            finp.close();
            fout.close();
        }
    } catch (ParseException e) {
        Usage("Cannot parse arguments", options);
    } catch (Exception e) {
        e.printStackTrace();
        System.err.println("Terminating due to an exception: " + e);
        System.exit(1);
    }
}

From source file:com.twentyn.chemicalClassifier.Runner.java

public static void main(String[] args) throws Exception {
    BufferedReader reader = new BufferedReader(new FileReader(args[0]));
    BufferedWriter writer = new BufferedWriter(new FileWriter(args[1]));

    try {
        Oscar oscar = new Oscar();

        String line = null;
        /* NOTE: this is exactly the wrong way to write a TSV reader.  Caveat emptor.
         * See http://tburette.github.io/blog/2014/05/25/so-you-want-to-write-your-own-CSV-code/
         * and then use org.apache.commons.csv.CSVParser instead.
         */
        while ((line = reader.readLine()) != null) {
            // TSV means split on tabs!  Nothing else will do.
            List<String> fields = Arrays.asList(line.split("\t"));
            // Choke if our invariants aren't satisfied.  We expect every line to have a name and an InChI.
            if (fields.size() != 2) {
                throw new RuntimeException(
                        String.format("Found malformed line (all lines must have two fields): %s", line));
            }
            String name = fields.get(1);
            List<ResolvedNamedEntity> entities = oscar.findAndResolveNamedEntities(name);

            System.out.println("**********");
            System.out.println("Name: " + name);
            List<String> outputFields = new ArrayList<>(fields.size() + 1);
            outputFields.addAll(fields);
            if (entities.size() == 0) {
                System.out.println("No match");
                outputFields.add("noMatch");
            } else if (entities.size() == 1) {
                ResolvedNamedEntity entity = entities.get(0);
                NamedEntity ne = entity.getNamedEntity();
                if (ne.getStart() != 0 || ne.getEnd() != name.length()) {
                    System.out.println("Partial match");
                    printEntity(entity);
                    outputFields.add("partialMatch");
                } else {
                    System.out.println("Exact match");
                    printEntity(entity);
                    outputFields.add("exactMatch");
                    List<ChemicalStructure> structures = entity.getChemicalStructures(FormatType.STD_INCHI);
                    for (ChemicalStructure s : structures) {
                        outputFields.add(s.getValue());
                    }
                }
            } else { // Multiple matches found!
                System.out.println("Multiple matches");
                for (ResolvedNamedEntity e : entities) {
                    printEntity(e);
                }
                outputFields.add("multipleMatches");
            }

            writer.write(String.join("\t", outputFields));
            writer.newLine();
        }
    } finally {
        writer.flush();
        writer.close();
    }
}

From source file:org.apache.airavata.messaging.client.RabbitMQListener.java

public static void main(String[] args) {
    File file = new File("/tmp/latency_client");
    parseArguments(args);
    try {
        FileOutputStream fos = new FileOutputStream(file, false);
        final BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fos));
        String brokerUrl = ServerSettings.getSetting(RABBITMQ_BROKER_URL);
        System.out.println("broker url " + brokerUrl);
        final String exchangeName = ServerSettings.getSetting(RABBITMQ_EXCHANGE_NAME);
        RabbitMQStatusConsumer consumer = new RabbitMQStatusConsumer(brokerUrl, exchangeName);
        consumer.listen(new MessageHandler() {
            @Override
            public Map<String, Object> getProperties() {
                Map<String, Object> props = new HashMap<String, Object>();
                List<String> routingKeys = new ArrayList<String>();
                if (allMessages) {
                    routingKeys.add("*");
                    routingKeys.add("*.*");
                    routingKeys.add("*.*.*");
                    routingKeys.add("*.*.*.*");
                    routingKeys.add("*.*.*.*.*");
                } else {
                    if (gatewayLevelMessages) {
                        routingKeys.add(gatewayId);
                        routingKeys.add(gatewayId + ".*");
                        routingKeys.add(gatewayId + ".*.*");
                        routingKeys.add(gatewayId + ".*.*.*");
                        routingKeys.add(gatewayId + ".*.*.*.*");
                    } else if (experimentLevelMessages) {
                        routingKeys.add(gatewayId);
                        routingKeys.add(gatewayId + "." + experimentId);
                        routingKeys.add(gatewayId + "." + experimentId + ".*");
                        routingKeys.add(gatewayId + "." + experimentId + ".*.*");
                        routingKeys.add(gatewayId + "." + experimentId + ".*.*.*");
                    } else if (jobLevelMessages) {
                        routingKeys.add(gatewayId);
                        routingKeys.add(gatewayId + "." + experimentId);
                        routingKeys.add(gatewayId + "." + experimentId + ".*");
                        routingKeys.add(gatewayId + "." + experimentId + ".*.*");
                        routingKeys.add(gatewayId + "." + experimentId + ".*." + jobId);
                    }
                }
                props.put(MessagingConstants.RABBIT_ROUTING_KEY, routingKeys);
                return props;
            }

            @Override
            public void onMessage(MessageContext message) {
                try {
                    long latency = System.currentTimeMillis() - message.getUpdatedTime().getTime();
                    bw.write(message.getMessageId() + " :" + latency);
                    bw.newLine();
                    bw.flush();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                if (message.getType().equals(MessageType.EXPERIMENT)) {
                    try {
                        ExperimentStatusChangeEvent event = new ExperimentStatusChangeEvent();
                        TBase messageEvent = message.getEvent();
                        byte[] bytes = ThriftUtils.serializeThriftObject(messageEvent);
                        ThriftUtils.createThriftFromBytes(bytes, event);
                        System.out.println(" Message Received with message id '" + message.getMessageId()
                                + "' and with message type '" + message.getType() + "' and with state : '"
                                + event.getState().toString() + " for Gateway " + event.getGatewayId());
                    } catch (TException e) {
                        logger.error(e.getMessage(), e);
                    }
                } else if (message.getType().equals(MessageType.PROCESS)) {
                    /*try {
                    WorkflowNodeStatusChangeEvent event = new WorkflowNodeStatusChangeEvent();
                    TBase messageEvent = message.getEvent();
                    byte[] bytes = ThriftUtils.serializeThriftObject(messageEvent);
                    ThriftUtils.createThriftFromBytes(bytes, event);
                    System.out.println(" Message Received with message id '" + message.getMessageId()
                            + "' and with message type '" + message.getType() + "' and with state : '" + event.getState().toString() +
                            " for Gateway " + event.getWorkflowNodeIdentity().getGatewayId());
                    } catch (TException e) {
                    logger.error(e.getMessage(), e);
                    }*/
                } else if (message.getType().equals(MessageType.TASK)) {
                    try {
                        TaskStatusChangeEvent event = new TaskStatusChangeEvent();
                        TBase messageEvent = message.getEvent();
                        byte[] bytes = ThriftUtils.serializeThriftObject(messageEvent);
                        ThriftUtils.createThriftFromBytes(bytes, event);
                        System.out.println(" Message Received with message id '" + message.getMessageId()
                                + "' and with message type '" + message.getType() + "' and with state : '"
                                + event.getState().toString() + " for Gateway "
                                + event.getTaskIdentity().getGatewayId());
                    } catch (TException e) {
                        logger.error(e.getMessage(), e);
                    }
                } else if (message.getType().equals(MessageType.JOB)) {
                    try {
                        JobStatusChangeEvent event = new JobStatusChangeEvent();
                        TBase messageEvent = message.getEvent();
                        byte[] bytes = ThriftUtils.serializeThriftObject(messageEvent);
                        ThriftUtils.createThriftFromBytes(bytes, event);
                        System.out.println(" Message Received with message id '" + message.getMessageId()
                                + "' and with message type '" + message.getType() + "' and with state : '"
                                + event.getState().toString() + " for Gateway "
                                + event.getJobIdentity().getGatewayId());
                    } catch (TException e) {
                        logger.error(e.getMessage(), e);
                    }
                }
            }
        });
    } catch (ApplicationSettingsException e) {
        logger.error("Error reading airavata server properties", e);
    } catch (Exception e) {
        logger.error(e.getMessage(), e);
    }

}

From source file:diffhunter.DiffHunter.java

/**
 * @param args the command line arguments
 * @throws org.apache.commons.cli.ParseException
 * @throws java.io.IOException
 */
public static void main(String[] args) throws ParseException, IOException {

    //String test_ = Paths.get("J:\\VishalData\\additional\\", "Sasan" + "_BDB").toAbsolutePath().toString();

    // TODO code application logic here
    /*args = new String[]
    {
    "-i", "-b", "J:\\VishalData\\additional\\Ptbp2_E18_5_cortex_CLIP_mm9_plus_strand_sorted.bed", "-r", "J:\\VishalData\\additional\\mouse_mm9.txt", "-o", "J:\\VishalData"
    };*/

    /*args = new String[]
    {
    "-c", "-r", "J:\\VishalData\\additional\\mouse_mm9.txt", "-1", "J:\\VishalData\\Ptbp2_Adult_testis_CLIP_mm9_plus_strand_sorted_BDB", "-2", "J:\\VishalData\\Ptbp2_E18_5_cortex_CLIP_mm9_plus_strand_sorted_BDB", "-w", "200", "-s", "50", "-o", "J:\\VishalData"
    };*/
    Options options = new Options();

    // add t option
    options.addOption("i", "index", false, "Indexing BED files.");
    options.addOption("b", "bed", true, "bed file to be indexed");
    options.addOption("o", "output", true, "Folder that the index/comparison file will be created.");
    options.addOption("r", "reference", true, "Reference annotation file to be used for indexing");
    options.addOption("c", "compare", false, "Finding differences between two conditions");
    options.addOption("1", "first", true, "First sample index location");
    options.addOption("2", "second", true, "Second sample index location");
    options.addOption("w", "window", true, "Length of window for identifying differences");
    options.addOption("s", "sliding", true, "Length of sliding");

    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(options, args);

    boolean indexing = false;
    boolean comparing = false;

    //Indexing!
    if (cmd.hasOption("i")) {
        //if(cmd.hasOption("1"))
        //System.err.println("sasan");

        //System.out.println("sasa");
        indexing = true;

    } else if (cmd.hasOption("c")) {
        //System.err.println("");
        comparing = true;

    } else {
        //System.err.println("Option is not deteced.");
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("diffhunter", options);
        return;
    }

    //Indexing is selected
    //
    if (indexing == true) {
        //Since indexing is true.
        //User have to provide file for indexing.
        if (!(cmd.hasOption("o") || cmd.hasOption("r") || cmd.hasOption("b"))) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("diffhunter", options);
            return;
        }
        String bedfile_ = cmd.getOptionValue("b");
        String reference_file = cmd.getOptionValue("r");
        String folder_loc = cmd.getOptionValue("o");

        String sample_name = FilenameUtils.getBaseName(bedfile_);

        try (Database B2 = BerkeleyDB_Box.Get_BerkeleyDB(
                Paths.get(folder_loc, sample_name + "_BDB").toAbsolutePath().toString(), true, sample_name)) {
            Indexer indexing_ = new Indexer(reference_file);
            indexing_.Make_Index(B2, bedfile_,
                    Paths.get(folder_loc, sample_name + "_BDB").toAbsolutePath().toString());
            B2.close();

        }
    } else if (comparing == true) {
        if (!(cmd.hasOption("o") || cmd.hasOption("w") || cmd.hasOption("s") || cmd.hasOption("1")
                || cmd.hasOption("2"))) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("diffhunter", options);
            return;
        }
        String folder_loc = cmd.getOptionValue("o");
        int window_ = Integer.parseInt(cmd.getOptionValue("w"));
        //int window_=600;

        int slide_ = Integer.parseInt(cmd.getOptionValue("s"));

        String first = cmd.getOptionValue("1").replace("_BDB", "");
        String second = cmd.getOptionValue("2").replace("_BDB", "");
        String reference_file = cmd.getOptionValue("r");
        //String folder_loc=cmd.getOptionValue("o");

        String sample_name_first = FilenameUtils.getBaseName(first);
        String sample_name_second = FilenameUtils.getBaseName(second);

        Database B1 = BerkeleyDB_Box.Get_BerkeleyDB(first + "_BDB", false, sample_name_first);
        Database B2 = BerkeleyDB_Box.Get_BerkeleyDB(second + "_BDB", false, sample_name_second);

        List<String> first_condition_genes = Files
                .lines(Paths.get(first + "_BDB", sample_name_first + ".txt").toAbsolutePath())
                .collect(Collectors.toList());
        List<String> second_condition_genes = Files
                .lines(Paths.get(second + "_BDB", sample_name_second + ".txt").toAbsolutePath())
                .collect(Collectors.toList());
        System.out.println("First and second condition are loaded!!! ");
        List<String> intersection_ = new ArrayList<>(first_condition_genes);
        intersection_.retainAll(second_condition_genes);

        BufferedWriter output = new BufferedWriter(
                new FileWriter(Paths.get(folder_loc, "differences_" + window_ + "_s" + slide_ + "_c" + ".txt")
                        .toAbsolutePath().toString(), false));
        List<Result_Window> final_results = Collections.synchronizedList(new ArrayList<>());
        Worker_New worker_class = new Worker_New();
        worker_class.Read_Reference(reference_file);

        while (!intersection_.isEmpty()) {
            List<String> selected_genes = new ArrayList<>();
            //if (intersection_.size()<=10000){selected_genes.addAll(intersection_.subList(0, intersection_.size()));}
            //else selected_genes.addAll(intersection_.subList(0, 10000));
            // Both branches of the original if/else were identical (the condition compared
            // intersection_.size() to itself), so this simply takes all remaining genes.
            selected_genes.addAll(intersection_);
            intersection_.removeAll(selected_genes);
            //System.out.println("Intersection count is:"+intersection_.size());
            //final List<Result_Window> resultssss_=new ArrayList<>();
            IntStream.range(0, selected_genes.size()).parallel().forEach(i -> {
                System.out.println(selected_genes.get(i) + "\tprocessing......");
                String gene_of_interest = selected_genes.get(i);//"ENSG00000142657|PGD";//intersection_.get(6);////"ENSG00000163395|IGFN1";//"ENSG00000270066|SCARNA2";
                int start = worker_class.dic_genes.get(gene_of_interest).start_loc;
                int end = worker_class.dic_genes.get(gene_of_interest).end_loc;

                Map<Integer, Integer> first_ = Collections.EMPTY_MAP;
                try {
                    first_ = BerkeleyDB_Box.Get_Coord_Read(B1, gene_of_interest);
                } catch (IOException | ClassNotFoundException ex) {
                    Logger.getLogger(DiffHunter.class.getName()).log(Level.SEVERE, null, ex);
                }

                Map<Integer, Integer> second_ = Collections.EMPTY_MAP;
                try {
                    second_ = BerkeleyDB_Box.Get_Coord_Read(B2, gene_of_interest);
                } catch (IOException | ClassNotFoundException ex) {
                    Logger.getLogger(DiffHunter.class.getName()).log(Level.SEVERE, null, ex);
                }
                List<Window> top_windows_first = worker_class.Get_Top_Windows(window_, first_, slide_);
                List<Window> top_windows_second = worker_class.Get_Top_Windows(window_, second_, slide_);
                //System.out.println("passed for window peak call for gene \t"+selected_genes.get(i));
                // System.out.println("top_window_first_Count\t"+top_windows_first.size());
                // System.out.println("top_window_second_Count\t"+top_windows_second.size());
                if (top_windows_first.isEmpty() && top_windows_second.isEmpty()) {
                    return;
                }

                List<Result_Window> res_temp = new Worker_New().Get_Significant_Windows(gene_of_interest, start,
                        end, top_windows_first, top_windows_second, second_, first_, sample_name_first,
                        sample_name_second, 0.01);
                if (!res_temp.isEmpty()) {
                    final_results.addAll(res_temp);//final_results.addAll(worker_class.Get_Significant_Windows(gene_of_interest, start, end, top_windows_first, top_windows_second, second_, first_, first_condition, second_condition, 0.01));

                } //System.out.println(selected_genes.get(i)+"\tprocessed.");

            });

            /*selected_genes.parallelStream().forEach(i ->
             {
                    
                    
             });*/
            List<Double> pvals = new ArrayList<>();

            for (int i = 0; i < final_results.size(); i++) {
                pvals.add(final_results.get(i).p_value);
            }
            List<Double> qvals = MultipleTestCorrection.benjaminiHochberg(pvals);

            System.out.println("Writing to file...");
            output.append("Gene_Symbol\tContributing_Sample\tStart\tEnd\tOddsRatio\tp_Value\tFDR");
            output.newLine();

            for (int i = 0; i < final_results.size(); i++) {
                Result_Window item = final_results.get(i);
                output.append(item.associated_gene_symbol + "\t" + item.contributing_windows + "\t"
                        + item.start_loc + "\t" + item.end_loc + "\t" + item.oddsratio_ + "\t" + item.p_value
                        + "\t" + qvals.get(i)); //+ "\t" + item.average_other_readcount_cotributing + "\t" + item.average_other_readcount_cotributing + "\t" + item.average_window_readcount_non + "\t" + item.average_other_readcount_non);
                output.newLine();
            }

            /* for (Result_Window item : final_results)
             {
            output.append(item.associated_gene_symbol + "\t" + item.contributing_windows + "\t" + item.start_loc + "\t" + item.end_loc + "\t" + item.oddsratio_ + "\t" + item.p_value); //+ "\t" + item.average_other_readcount_cotributing + "\t" + item.average_other_readcount_cotributing + "\t" + item.average_window_readcount_non + "\t" + item.average_other_readcount_non);
            output.newLine();
             }
               */
            final_results.clear();

        }
        output.close();

    }
    System.out.println("Done.");

}

From source file:com.yahoo.labs.yamall.local.Yamall.java

public static void main(String[] args) {
    String[] remainingArgs = null;
    String inputFile = null;
    String predsFile = null;
    String saveModelFile = null;
    String initialModelFile = null;
    String lossName = null;
    String parserName = null;
    String linkName = null;
    String invertHashName = null;
    double learningRate = 1;
    String minPredictionString = null;
    String maxPredictionString = null;
    String fmNumberFactorsString = null;
    int bitsHash;
    int numberPasses;
    int holdoutPeriod = 10;

    boolean testOnly = false;
    boolean exponentialProgress;
    double progressInterval;

    options.addOption("h", "help", false, "displays this help");
    options.addOption("t", false, "ignore label information and just test");
    options.addOption(Option.builder().hasArg(false).required(false).longOpt("binary")
            .desc("reports loss as binary classification with -1,1 labels").build());
    options.addOption(
            Option.builder().hasArg(false).required(false).longOpt("solo").desc("uses SOLO optimizer").build());
    options.addOption(Option.builder().hasArg(false).required(false).longOpt("pcsolo")
            .desc("uses Per Coordinate SOLO optimizer").build());
    options.addOption(Option.builder().hasArg(false).required(false).longOpt("pistol")
            .desc("uses PiSTOL optimizer").build());
    options.addOption(Option.builder().hasArg(false).required(false).longOpt("kt")
            .desc("(EXPERIMENTAL) uses KT optimizer").build());
    options.addOption(Option.builder().hasArg(false).required(false).longOpt("pckt")
            .desc("(EXPERIMENTAL) uses Per Coordinate KT optimizer").build());
    options.addOption(Option.builder().hasArg(false).required(false).longOpt("pccocob")
            .desc("(EXPERIMENTAL) uses Per Coordinate COCOB optimizer").build());
    options.addOption(Option.builder().hasArg(false).required(false).longOpt("cocob")
            .desc("(EXPERIMENTAL) uses COCOB optimizer").build());
    options.addOption(
            Option.builder().hasArg(false).required(false).longOpt("fm").desc("Factorization Machine").build());
    options.addOption(Option.builder("f").hasArg(true).required(false).desc("final regressor to save")
            .type(String.class).longOpt("final_regressor").build());
    options.addOption(Option.builder("p").hasArg(true).required(false).desc("file to output predictions to")
            .longOpt("predictions").type(String.class).build());
    options.addOption(
            Option.builder("i").hasArg(true).required(false).desc("initial regressor(s) to load into memory")
                    .longOpt("initial_regressor").type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false).desc(
            "specify the loss function to be used. Currently available ones are: absolute, squared (default), hinge, logistic")
            .longOpt("loss_function").type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false).desc(
            "specify the link function used in the output of the predictions. Currently available ones are: identity (default), logistic")
            .longOpt("link").type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false)
            .desc("output human-readable final regressor with feature names").longOpt("invert_hash")
            .type(String.class).build());
    options.addOption(
            Option.builder("l").hasArg(true).required(false).desc("set (initial) learning Rate, default = 1.0")
                    .longOpt("learning_rate").type(String.class).build());
    options.addOption(Option.builder("b").hasArg(true).required(false)
            .desc("number of bits in the feature table, default = 18").longOpt("bit_precision")
            .type(String.class).build());
    options.addOption(Option.builder("P").hasArg(true).required(false)
            .desc("progress update frequency, integer: additive; float: multiplicative, default = 2.0")
            .longOpt("progress").type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false)
            .desc("smallest prediction to output, before the link function, default = -50")
            .longOpt("min_prediction").type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false)
            .desc("smallest prediction to output, before the link function, default = 50")
            .longOpt("max_prediction").type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false)
            .desc("ignore namespaces beginning with the characters in <arg>").longOpt("ignore")
            .type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false).desc("number of training passes")
            .longOpt("passes").type(String.class).build());
    options.addOption(
            Option.builder().hasArg(true).required(false).desc("holdout period for test only, default = 10")
                    .longOpt("holdout_period").type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false)
            .desc("number of factors for Factorization Machines default = 8").longOpt("fmNumberFactors")
            .type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false)
            .desc("specify the parser to use. Currently available ones are: vw (default), libsvm, tsv")
            .longOpt("parser").type(String.class).build());
    options.addOption(Option.builder().hasArg(true).required(false).desc("schema file for the TSV input")
            .longOpt("schema").type(String.class).build());

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        System.out.println("Unrecognized option");
        help();
    }
    if (cmd.hasOption("h"))
        help();
    if (cmd.hasOption("t"))
        testOnly = true;
    if (cmd.hasOption("binary")) {
        binary = true;
        System.out.println("Reporting binary loss");
    }
    initialModelFile = cmd.getOptionValue("i");
    predsFile = cmd.getOptionValue("p");
    lossName = cmd.getOptionValue("loss_function", "squared");
    linkName = cmd.getOptionValue("link", "identity");
    saveModelFile = cmd.getOptionValue("f");
    learningRate = Double.parseDouble(cmd.getOptionValue("l", "1.0"));
    bitsHash = Integer.parseInt(cmd.getOptionValue("b", "18"));
    invertHashName = cmd.getOptionValue("invert_hash");
    minPredictionString = cmd.getOptionValue("min_prediction", "-50");
    maxPredictionString = cmd.getOptionValue("max_prediction", "50");
    fmNumberFactorsString = cmd.getOptionValue("fmNumberFactors", "8");
    parserName = cmd.getOptionValue("parser", "vw");

    numberPasses = Integer.parseInt(cmd.getOptionValue("passes", "1"));
    System.out.println("Number of passes = " + numberPasses);
    if (numberPasses > 1) {
        holdoutPeriod = Integer.parseInt(cmd.getOptionValue("holdout_period", "10"));
        System.out.println("Holdout period = " + holdoutPeriod);
    }

    remainingArgs = cmd.getArgs();
    if (remainingArgs.length == 1)
        inputFile = remainingArgs[0];

    InstanceParser instanceParser = null;
    if (parserName.equals("vw"))
        instanceParser = new VWParser(bitsHash, cmd.getOptionValue("ignore"), (invertHashName != null));
    else if (parserName.equals("libsvm"))
        instanceParser = new LIBSVMParser(bitsHash, (invertHashName != null));
    else if (parserName.equals("tsv")) {
        String schema = cmd.getOptionValue("schema");
        if (schema == null) {
            System.out.println("TSV parser requires a schema file.");
            System.exit(0);
        } else {
            String spec = null;
            try {
                spec = new String(Files.readAllBytes(Paths.get(schema)));
            } catch (IOException e) {
                System.out.println("Error reading the TSV schema file.");
                e.printStackTrace();
                System.exit(0);
            }
            instanceParser = new TSVParser(bitsHash, cmd.getOptionValue("ignore"), (invertHashName != null),
                    spec);
        }
    } else {
        System.out.println("Unknown parser.");
        System.exit(0);
    }
    System.out.println("Num weight bits = " + bitsHash);

    // setup progress
    String progress = cmd.getOptionValue("P", "2.0");
    if (progress.indexOf('.') >= 0) {
        exponentialProgress = true;
        progressInterval = (double) Double.parseDouble(progress);
    } else {
        exponentialProgress = false;
        progressInterval = (double) Integer.parseInt(progress);
    }

    // min and max predictions
    minPrediction = (double) Double.parseDouble(minPredictionString);
    maxPrediction = (double) Double.parseDouble(maxPredictionString);

    // number of factors for Factorization Machines
    fmNumberFactors = (int) Integer.parseInt(fmNumberFactorsString);

    // configure the learner
    Loss lossFnc = null;
    LinkFunction link = null;
    if (initialModelFile == null) {
        if (cmd.hasOption("kt")) {
            learner = new KT(bitsHash);
        } else if (cmd.hasOption("pckt")) {
            learner = new PerCoordinateKT(bitsHash);
        } else if (cmd.hasOption("pcsolo")) {
            learner = new PerCoordinateSOLO(bitsHash);
        } else if (cmd.hasOption("solo")) {
            learner = new SOLO(bitsHash);
        } else if (cmd.hasOption("pccocob")) {
            learner = new PerCoordinateCOCOB(bitsHash);
        } else if (cmd.hasOption("cocob")) {
            learner = new COCOB(bitsHash);
        } else if (cmd.hasOption("pistol")) {
            learner = new PerCoordinatePiSTOL(bitsHash);
        } else if (cmd.hasOption("fm")) {
            learner = new SGD_FM(bitsHash, fmNumberFactors);
        } else
            learner = new SGD_VW(bitsHash);
    } else {
        learner = IOLearner.loadLearner(initialModelFile);
    }

    // setup link function
    if (linkName.equals("identity")) {
        link = new IdentityLinkFunction();
    } else if (linkName.equals("logistic")) {
        link = new LogisticLinkFunction();
    } else {
        System.out.println("Unknown link function.");
        System.exit(0);
    }

    // setup loss function
    if (lossName.equals("squared")) {
        lossFnc = new SquareLoss();
    } else if (lossName.equals("hinge")) {
        lossFnc = new HingeLoss();
    } else if (lossName.equals("logistic")) {
        lossFnc = new LogisticLoss();
    } else if (lossName.equals("absolute")) {
        lossFnc = new AbsLoss();
    } else {
        System.out.println("Unknown loss function.");
        System.exit(0);
    }

    learner.setLoss(lossFnc);
    learner.setLearningRate(learningRate);

    // maximum range predictions
    System.out.println("Max prediction = " + maxPrediction + ", Min Prediction = " + minPrediction);
    // print information about the learner
    System.out.println(learner.toString());
    // print information about the link function
    System.out.println(link.toString());
    // print information about the parser
    System.out.println(instanceParser.toString());
    // print information about ignored namespaces
    System.out.println("Ignored namespaces = " + cmd.getOptionValue("ignore", ""));

    long start = System.nanoTime();
    FileInputStream fstream;
    try {
        BufferedReader br = null;
        if (inputFile != null) {
            fstream = new FileInputStream(inputFile);
            System.out.println("Reading datafile = " + inputFile);
            br = new BufferedReader(new InputStreamReader(fstream));
        } else {
            System.out.println("Reading from console");
            br = new BufferedReader(new InputStreamReader(System.in));
        }

        File fout = null;
        FileOutputStream fos = null;
        BufferedWriter bw = null;
        if (predsFile != null) {
            fout = new File(predsFile);
            fos = new FileOutputStream(fout);
            bw = new BufferedWriter(new OutputStreamWriter(fos));
        }

        try {
            System.out.println("average       example  current  current  current");
            System.out.println("loss          counter    label  predict  features");
            int iter = 0;
            double cumLoss = 0;
            double weightedSampleSum = 0;
            double sPlus = 0;
            double sMinus = 0;
            Instance sample = null;
            boolean justPrinted = false;
            int pass = 0;
            ObjectOutputStream ooutTr = null;
            ObjectOutputStream ooutHO = null;
            ObjectInputStream oinTr = null;
            double pred = 0;
            int limit = 1;
            double hError = Double.MAX_VALUE;
            double lastHError = Double.MAX_VALUE;
            int numTestSample = 0;
            int numTrainingSample = 0;
            int idx = 0;

            if (numberPasses > 1) {
                ooutTr = new ObjectOutputStream(new FileOutputStream("cache_training.bin"));
                ooutHO = new ObjectOutputStream(new FileOutputStream("cache_holdout.bin"));
                oinTr = new ObjectInputStream(new FileInputStream("cache_training.bin"));
            }

            do {
                while (true) {
                    double score;

                    if (pass > 0 && numberPasses > 1) {
                        Instance tmp = (Instance) oinTr.readObject();
                        if (tmp != null)
                            sample = tmp;
                        else
                            break;
                    } else {
                        String strLine = br.readLine();
                        if (strLine != null)
                            sample = instanceParser.parse(strLine);
                        else
                            break;
                    }

                    justPrinted = false;
                    idx++;

                    if (numberPasses > 1 && pass == 0 && idx % holdoutPeriod == 0) {
                        // store the current sample for the holdout set
                        ooutHO.writeObject(sample);
                        ooutHO.reset();
                        numTestSample++;
                    } else {
                        if (numberPasses > 1 && pass == 0) {
                            ooutTr.writeObject(sample);
                            ooutTr.reset();
                            numTrainingSample++;
                        }

                        iter++;
                        if (testOnly) {
                            // predict the sample
                            score = learner.predict(sample);
                        } else {
                            // predict the sample and update the classifier using the sample
                            score = learner.update(sample);
                        }
                        score = Math.min(Math.max(score, minPrediction), maxPrediction);
                        pred = link.apply(score);
                        if (!binary)
                            cumLoss += learner.getLoss().lossValue(score, sample.getLabel())
                                    * sample.getWeight();
                        else if (Math.signum(score) != sample.getLabel())
                            cumLoss += sample.getWeight();

                        weightedSampleSum += sample.getWeight();
                        if (sample.getLabel() > 0)
                            sPlus = sPlus + sample.getWeight();
                        else
                            sMinus = sMinus + sample.getWeight();

                        // output predictions to file
                        if (predsFile != null) {
                            bw.write(String.format("%.6f %s", pred, sample.getTag()));
                            bw.newLine();
                        }

                        // print statistics to screen
                        if (iter == limit) {
                            justPrinted = true;
                            System.out.printf("%.6f %12d  % .4f  % .4f  %d\n", cumLoss / weightedSampleSum,
                                    iter, sample.getLabel(), pred, sample.getVector().size());
                            if (exponentialProgress)
                                limit *= progressInterval;
                            else
                                limit += progressInterval;
                        }
                    }
                }
                if (numberPasses > 1) {
                    if (pass == 0) { // finished first pass of many
                        // write a null at the end of the files
                        ooutTr.writeObject(null);
                        ooutHO.writeObject(null);
                        ooutTr.flush();
                        ooutHO.flush();
                        ooutTr.close();
                        ooutHO.close();

                        System.out.println("finished first epoch");
                        System.out.println(numTrainingSample + " training samples");
                        System.out.println(numTestSample + " holdout samples saved");
                    }
                    lastHError = hError;
                    hError = evalHoldoutError();
                }
                if (numberPasses > 1) {
                    System.out.printf("Weighted loss on holdout on epoch %d = %.6f\n", pass + 1, hError);

                    oinTr.close();
                    oinTr = new ObjectInputStream(new FileInputStream("cache_training.bin"));

                    if (hError > lastHError) {
                        System.out.println("Early stopping");
                        break;
                    }
                }
                pass++;
            } while (pass < numberPasses);

            if (justPrinted == false) {
                System.out.printf("%.6f %12d  % .4f  % .4f  %d\n", cumLoss / weightedSampleSum, iter,
                        sample.getLabel(), pred, sample.getVector().size());
            }
            System.out.println("finished run");

            System.out.println(String.format("average loss best constant predictor: %.6f",
                    lossFnc.lossConstantBinaryLabels(sPlus, sMinus)));

            if (saveModelFile != null)
                IOLearner.saveLearner(learner, saveModelFile);
            if (invertHashName != null)
                IOLearner.saveInvertHash(learner.getWeights(), instanceParser.getInvertHashMap(),
                        invertHashName);
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

        // close the input stream
        try {
            br.close();
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        // close the output stream
        if (predsFile != null) {
            try {
                bw.close();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
        long millis = System.nanoTime() - start;
        System.out.printf("Elapsed time: %d min, %d sec\n", TimeUnit.NANOSECONDS.toMinutes(millis),
                TimeUnit.NANOSECONDS.toSeconds(millis) - 60 * TimeUnit.NANOSECONDS.toMinutes(millis));
    } catch (FileNotFoundException e) {
        System.out.println("Error opening the input file");
        e.printStackTrace();
    }

}

From source file:Main.java

static void writeToFollower(String name) {
    try {
        File root = new File(Environment.getExternalStorageDirectory().toString(), ".Instagram");
        if (!root.exists()) {
            root.mkdirs();

        }
        File file = new File(root, "Following.txt");
        BufferedWriter buf = new BufferedWriter(new FileWriter(file, true));
        buf.newLine();
        buf.append(name);
        buf.close();
    } catch (Throwable t) {
        // failures are silently ignored here; in real code, at least log the Throwable
    }
}