Example usage for java.io BufferedWriter flush

List of usage examples for java.io BufferedWriter flush

Introduction

On this page you can find example usage for java.io BufferedWriter flush.

Prototype

public void flush() throws IOException 

Source Link

Document

Flushes the stream.

Usage

From source file:org.matsim.contrib.drt.analysis.DynModeTripsAnalyser.java

/**
 * Analyses the wait times of the given trips in bins of {@code binsize_s} seconds,
 * writes one CSV row per time bin (ride count plus wait-time statistics) and saves
 * two PNG charts: wait-time percentiles over time and ride requests per hour.
 *
 * @param fileName  output base path; ".csv", ".png" and "_requests.png" are appended
 * @param trips     trips to analyse; sorted in place. Nothing is written if empty.
 * @param binsize_s width of a time bin in seconds
 */
public static void analyseWaitTimes(String fileName, List<DynModeTrip> trips, int binsize_s) {
    Collections.sort(trips);
    if (trips.isEmpty())
        return;
    // Bin boundaries aligned to multiples of the bin size, covering all departures.
    int startTime = ((int) (trips.get(0).getDepartureTime() / binsize_s)) * binsize_s;
    int endTime = ((int) (trips.get(trips.size() - 1).getDepartureTime() / binsize_s) + binsize_s) * binsize_s;
    Map<Double, List<DynModeTrip>> splitTrips = splitTripsIntoBins(trips, startTime, endTime, binsize_s);

    // Locale-independent number format for the CSV output.
    DecimalFormat format = new DecimalFormat();
    format.setDecimalFormatSymbols(new DecimalFormatSymbols(Locale.US));
    format.setMinimumIntegerDigits(1);
    format.setMaximumFractionDigits(2);
    format.setGroupingUsed(false);

    SimpleDateFormat sdf2 = new SimpleDateFormat("HH:mm:ss");

    TimeSeriesCollection dataset = new TimeSeriesCollection();
    TimeSeriesCollection datasetrequ = new TimeSeriesCollection();
    TimeSeries averageWaitC = new TimeSeries("average");
    TimeSeries medianWait = new TimeSeries("median");
    TimeSeries p_5Wait = new TimeSeries("5th percentile");
    TimeSeries p_95Wait = new TimeSeries("95th percentile");
    TimeSeries requests = new TimeSeries("Ride requests");

    // try-with-resources closes the writer even when an exception is thrown mid-write
    // (the original leaked the writer in that case).
    try (BufferedWriter bw = IOUtils.getBufferedWriter(fileName + ".csv")) {
        bw.write("timebin;trips;average_wait;min;p_5;p_25;median;p_75;p_95;max");
        for (Entry<Double, List<DynModeTrip>> e : splitTrips.entrySet()) {
            long rides = 0;
            double averageWait = 0;
            double min = 0;
            double p_5 = 0;
            double p_25 = 0;
            double median = 0;
            double p_75 = 0;
            double p_95 = 0;
            double max = 0;
            if (!e.getValue().isEmpty()) {
                DescriptiveStatistics stats = new DescriptiveStatistics();
                for (DynModeTrip t : e.getValue()) {
                    stats.addValue(t.getWaitTime());
                }
                rides = stats.getN();
                averageWait = stats.getMean();
                min = stats.getMin();
                p_5 = stats.getPercentile(5);
                p_25 = stats.getPercentile(25);
                median = stats.getPercentile(50);
                p_75 = stats.getPercentile(75);
                p_95 = stats.getPercentile(95);
                max = stats.getMax();
            }
            Minute h = new Minute(sdf2.parse(Time.writeTime(e.getKey())));

            medianWait.addOrUpdate(h, Double.valueOf(median));
            averageWaitC.addOrUpdate(h, Double.valueOf(averageWait));
            p_5Wait.addOrUpdate(h, Double.valueOf(p_5));
            p_95Wait.addOrUpdate(h, Double.valueOf(p_95));
            requests.addOrUpdate(h, rides * 3600. / binsize_s);// normalised [req/h]
            bw.newLine();
            bw.write(Time.writeTime(e.getKey()) + ";" + rides + ";" + format.format(averageWait) + ";"
                    + format.format(min) + ";" + format.format(p_5) + ";" + format.format(p_25) + ";"
                    + format.format(median) + ";" + format.format(p_75) + ";" + format.format(p_95) + ";"
                    + format.format(max));
        }
        bw.flush();
        dataset.addSeries(averageWaitC);
        dataset.addSeries(medianWait);
        dataset.addSeries(p_5Wait);
        dataset.addSeries(p_95Wait);
        datasetrequ.addSeries(requests);
        JFreeChart chart = chartProfile(splitTrips.size(), dataset, "Waiting times", "Wait time (s)");
        JFreeChart chart2 = chartProfile(splitTrips.size(), datasetrequ, "Ride requests per hour",
                "Requests per hour (req/h)");
        ChartSaveUtils.saveAsPNG(chart, fileName, 1500, 1000);
        ChartSaveUtils.saveAsPNG(chart2, fileName + "_requests", 1500, 1000);

    } catch (IOException | ParseException e) {
        e.printStackTrace();
    }

}

From source file:org.londonsburning.proxy.ProxyPrinter.java

/**
 * Renders the given deck into an HTML proxy sheet using the configured
 * FreeMarker template and writes the result to {@code file}.
 *
 * @param deck           the deck to render
 * @param file           destination HTML file
 * @param skipBasicLands if true, cards named in the configured basic-land list are omitted
 */
private void generateHtml(final Deck deck, final File file, final boolean skipBasicLands) {
    // try-with-resources closes the writer even when template processing fails
    // (the original leaked it on any exception).
    try (BufferedWriter htmlOutput = new BufferedWriter(new FileWriter(file))) {

        final Configuration cfg = new Configuration();
        cfg.setClassForTemplateLoading(deck.getClass(), "/");
        cfg.setObjectWrapper(new DefaultObjectWrapper());
        final Template template = cfg.getTemplate(this.proxyConfiguration.getOutputTemplate());

        final TreeMap<String, Object> root = new TreeMap<String, Object>();
        root.put("title", deck.getDeckName());

        // One image URL per copy of each card, unless it is a skipped basic land.
        final List<String> list = new ArrayList<String>();
        for (final String cardName : deck.getCardList().keySet()) {
            for (int i = 0; i <= (deck.getCardList().get(cardName) - 1); i++) {
                if (!this.proxyConfiguration.getBasicLandNames().contains(cardName) || !skipBasicLands) {
                    list.add(getURL(cardName));
                }
            }
        }

        final LinkedHashMap<String, Integer> map = deck.getCardList();
        root.put("urls", list);
        root.put("cardBorder", this.proxyConfiguration.getCard().getCardBorder());
        // Card dimensions are scaled by the configured card scale factor.
        root.put("cardHeight", Math.round(this.proxyConfiguration.getCard().getCardHeight()
                * this.proxyConfiguration.getCard().getCardScale()));
        root.put("cardWidth", Math.round(this.proxyConfiguration.getCard().getCardWidth()
                * this.proxyConfiguration.getCard().getCardScale()));
        root.put("cardListWidth",
                this.proxyConfiguration.getCard().getCardWidth() - this.proxyConfiguration.getCardListWidth());
        root.put("cardList", map);
        root.put("numberOfCards", deck.getNumberOfCards());

        /* Merge data-model with template */
        template.process(root, htmlOutput);
        htmlOutput.flush();
    } catch (final InterruptedException e) {
        Thread.currentThread().interrupt(); // preserve the interrupt status for callers
        logger.debug(e.toString());
    } catch (final IOException | TemplateException | URISyntaxException e) {
        logger.debug(e.toString());
    }
}

From source file:FileBaseDataMap.java

/**
 * Stores a fixed-length key/value record in the data file selected by the key's
 * hash code. A new key is appended to the end of the file; an existing key's
 * record is overwritten in place. File handles are cached per data file and
 * reopened (with one retry) if a write fails.
 *
 * @param key      record key; padded to keyDataLength with the fill character
 * @param value    record value; padded to oneDataLength with the fill character
 * @param hashCode hash code of the key, used to pick the target data file
 */
public void put(String key, String value, int hashCode) {
    try {

        File file = dataFileList[hashCode % numberOfDataFiles];

        // Build the fixed-length record: padded key followed by padded value.
        StringBuffer buf = new StringBuffer(this.fillCharacter(key, keyDataLength));
        buf.append(this.fillCharacter(value, oneDataLength));

        CacheContainer accessor = (CacheContainer) innerCache.get(file.getAbsolutePath());
        RandomAccessFile raf = null;
        BufferedWriter wr = null;

        if (accessor == null || accessor.isClosed == true) {

            // No cached handles (or they were closed): open fresh ones and cache them.
            raf = new RandomAccessFile(file, "rwd");
            wr = new BufferedWriter(new FileWriter(file, true));
            accessor = new CacheContainer();
            accessor.raf = raf;
            accessor.wr = wr;
            accessor.file = file;
            innerCache.put(file.getAbsolutePath(), accessor);
        } else {

            raf = accessor.raf;
            wr = accessor.wr;
        }

        // Write the record; on I/O failure the handles are reopened and the write retried once.
        for (int tryIdx = 0; tryIdx < 2; tryIdx++) {
            try {
                // Line number of the existing record for this key, or -1 if absent.
                long dataLineNo = this.getLinePoint(key, raf);

                if (dataLineNo == -1) {

                    // New key: append the record at the end of the file.
                    wr.write(buf.toString());
                    wr.flush();

                    // One more record stored overall.
                    this.totalSize.getAndIncrement();
                } else {

                    // Existing slot: only count it as new if get() currently sees no value.
                    boolean increMentFlg = false;
                    if (this.get(key, hashCode) == null)
                        increMentFlg = true;

                    // Overwrite the record in place at its fixed-length offset.
                    // NOTE(review): assumes lineDataSize bytes per record — confirm the
                    // encoding is single-byte so char count equals byte count.
                    raf.seek(dataLineNo * (lineDataSize));
                    raf.write(buf.toString().getBytes(), 0, lineDataSize);
                    if (increMentFlg)
                        this.totalSize.getAndIncrement();
                }
                break;
            } catch (IOException ie) {

                // Second failure is fatal: propagate the IOException to the outer handler.
                if (tryIdx == 1)
                    throw ie;
                try {

                    if (raf != null)
                        raf.close();
                    if (wr != null)
                        wr.close();

                    raf = new RandomAccessFile(file, "rwd");
                    wr = new BufferedWriter(new FileWriter(file, true));
                    accessor = new CacheContainer();
                    accessor.raf = raf;
                    accessor.wr = wr;
                    accessor.file = file;
                    innerCache.put(file.getAbsolutePath(), accessor);
                } catch (Exception e) {
                    throw e;
                }
            }
        }
    } catch (Exception e2) {
        e2.printStackTrace();
    }
}

From source file:net.sf.smbt.touchosc.utils.TouchOSCUtils.java

/**
 * Initializes the UI model from a zipped TouchOSC layout file. Each ".xml" entry
 * of the archive is unpacked next to it (file name prefixed with "_"), rewritten
 * into the namespace-qualified attribute names the EMF resource factory expects,
 * and then the first unpacked file is demand-loaded as an EMF resource.
 *
 * @param zipTouchoscFilePath path to a zipped .jzml/.touchosc layout file
 *
 * @return UI model, or null when the loaded resource root is not a TOP element
 */
public TouchOscApp loadAppFromTouchOscXML(String zipTouchoscFilePath) {
    //
    // Create a resource set.
    //
    ResourceSet resourceSet = new ResourceSetImpl();

    IPath path = new Path(zipTouchoscFilePath);

    //
    // Register the default resource factory -- only needed for stand-alone!
    //
    resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put(TouchoscPackage.eNS_PREFIX,
            new TouchoscResourceFactoryImpl());
    resourceSet.getPackageRegistry().put(TouchoscPackage.eNS_URI, TouchoscPackage.eINSTANCE);
    resourceSet.getPackageRegistry().put(TouchoscappPackage.eNS_URI, TouchoscappPackage.eINSTANCE);

    List<String> touchoscFilePathList = new ArrayList<String>();
    try {
        FileInputStream touchoscFile = new FileInputStream(zipTouchoscFilePath);
        ZipInputStream fileIS = new ZipInputStream(touchoscFile);

        ZipEntry zEntry = null;
        while ((zEntry = fileIS.getNextEntry()) != null) {
            // Remember the unpack target for every .xml entry; the first one is loaded below.
            if (zEntry.getName().endsWith(".xml")) {
                touchoscFilePathList.add(path.removeLastSegments(1) + "/_" + path.lastSegment());
            }
            // NOTE(review): the unpack target is derived from the archive path, not from
            // zEntry.getName() — every entry is written to the same "_<archive>" file; confirm
            // archives only ever contain a single .xml entry.
            FileOutputStream os = new FileOutputStream(path.removeLastSegments(1) + "/_" + path.lastSegment());
            BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(os));
            BufferedReader reader = new BufferedReader(new InputStreamReader(fileIS, Charset.forName("UTF-8")));
            // Fixed 64K buffer for the whole entry.
            CharBuffer charBuffer = CharBuffer.allocate(65535);
            // NOTE(review): this loop appends one closing "</touchosc:TOP>\n" per successful
            // read() call, not once at end-of-input, and entries larger than the buffer will
            // overflow it — verify behaviour for layouts bigger than 64K.
            while (reader.read(charBuffer) != -1)

                charBuffer.append("</touchosc:TOP>\n");
        charBuffer.flip();

            // Rewrite the raw TouchOSC XML into the attribute names and namespace header
            // that the generated EMF model uses.
            String content = charBuffer.toString();
            content = content.replace("<touchosc>", "");
            content = content.replace("</touchosc>", "");
            content = content.replace("<?xml version=\"1.0\" encoding=\"UTF-8\"?>", TOUCHOSC_XMLNS_HEADER);
            content = content.replace("numberX=", "number_x=");
            content = content.replace("numberY=", "number_y=");
            content = content.replace("invertedX=", "inverted_x=");
            content = content.replace("invertedY=", "inverted_y=");
            content = content.replace("localOff=", "local_off=");
            content = content.replace("oscCs=", "osc_cs=");

            // NOTE(review): writer is flushed but never closed; os.close() closes the
            // underlying stream, so this works, but the BufferedWriter is left dangling.
            writer.write(content);
            writer.flush();
            os.flush();
            os.close();
        }
        fileIS.close();
    } catch (FileNotFoundException e1) {
        e1.printStackTrace();
    } catch (IOException e2) {
        e2.printStackTrace();
    }

    //
    // Get the URI of the model file.
    // NOTE(review): throws IndexOutOfBoundsException if the archive contained no .xml
    // entry (exceptions above are only printed) — confirm inputs are always well-formed.
    //
    URI touchoscURI = URI.createFileURI(touchoscFilePathList.get(0));

    //
    // Demand load the resource for this file.
    //
    Resource resource = resourceSet.getResource(touchoscURI, true);

    Object obj = (Object) resource.getContents().get(0);
    if (obj instanceof TOP) {
        TOP top = (TOP) obj;
        reverseZOrders(top);
        return initAppFromTouchOsc(top.getLayout(), "horizontal".equals(top.getLayout().getOrientation()),
                "0".equals(top.getLayout().getMode()));
    }
    return null;
}

From source file:com.baomidou.mybatisplus.generator.AutoGenerator.java

/**
 * Mapper/*  w ww .  j  a v  a 2 s .  c o m*/
 *
 * @param beanName
 * @param mapperName
 * @throws IOException
 */
protected void buildMapper(String beanName, String mapperName) throws IOException {
    File mapperFile = new File(PATH_MAPPER, mapperName + ".java");
    BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(mapperFile), "utf-8"));
    bw.write("package " + config.getMapperPackage() + ";");
    bw.newLine();
    bw.newLine();
    bw.write("import " + config.getEntityPackage() + "." + beanName + ";");
    bw.newLine();
    if (config.getConfigIdType() == ConfigIdType.STRING) {
        bw.write("import com.baomidou.mybatisplus.mapper.CommonMapper;");
    } else {
        bw.write("import com.baomidou.mybatisplus.mapper.AutoMapper;");
    }
    bw.newLine();

    bw = buildClassComment(bw, beanName + " ??");
    bw.newLine();
    if (config.getConfigIdType() == ConfigIdType.STRING) {
        bw.write("public interface " + mapperName + " extends CommonMapper<" + beanName + "> {");
    } else {
        bw.write("public interface " + mapperName + " extends AutoMapper<" + beanName + "> {");
    }
    bw.newLine();
    bw.newLine();

    // ----------mapperEnd----------
    bw.newLine();
    bw.write("}");
    bw.flush();
    bw.close();
}

From source file:com.healthmarketscience.jackcess.util.ExportUtil.java

/**
 * Copy a table in this database into a new delimited text file.
 *
 * @param cursor
 *          Cursor to export
 * @param out
 *          Writer to export to
 * @param header
 *          If <code>true</code> the first line contains the column names
 * @param delim
 *          The column delimiter, <code>null</code> for default (comma)
 * @param quote
 *          The quote character
 * @param filter
 *          valid export filter
 *
 * @see Builder
 */
public static void exportWriter(Cursor cursor, BufferedWriter out, boolean header, String delim, char quote,
        ExportFilter filter) throws IOException {
    final String sep = (delim != null) ? delim : DEFAULT_DELIMITER;

    // Values containing the separator, the quote character or a line break need quoting.
    final Pattern quoteNeeded = Pattern
            .compile("(?:" + Pattern.quote(sep) + ")|(?:" + Pattern.quote("" + quote) + ")|(?:[\n\r])");

    final List<? extends Column> tableCols = cursor.getTable().getColumns();
    final List<Column> exportCols = filter.filterColumns(new ArrayList<Column>(tableCols));
    final int colCount = exportCols.size();

    // When the filter dropped columns, restrict row fetching to the surviving names.
    Collection<String> fetchNames = null;
    if (!tableCols.equals(exportCols)) {
        fetchNames = new HashSet<String>();
        for (Column col : exportCols) {
            fetchNames.add(col.getName());
        }
    }

    // Optional header row with the column names.
    if (header) {
        for (int i = 0; i < colCount; i++) {
            if (i > 0) {
                out.write(sep);
            }
            writeValue(out, exportCols.get(i).getName(), quote, quoteNeeded);
        }
        out.newLine();
    }

    // Data rows: fetch, run through the filter, then print field by field.
    final Object[] rawRow = new Object[colCount];
    Row row;
    while ((row = cursor.getNextRow(fetchNames)) != null) {

        for (int i = 0; i < colCount; i++) {
            rawRow[i] = exportCols.get(i).getRowValue(row);
        }

        final Object[] rowData = filter.filterRow(rawRow);
        if (rowData == null) {
            // Filter rejected the entire row.
            continue;
        }

        for (int i = 0; i < colCount; i++) {
            if (i > 0) {
                out.write(sep);
            }
            final Object cell = rowData[i];
            if (cell != null) {
                // Binary values are exported as hex; everything else via String.valueOf.
                final String text = (cell instanceof byte[]) ? ByteUtil.toHexString((byte[]) cell)
                        : String.valueOf(cell);
                writeValue(out, text, quote, quoteNeeded);
            }
        }

        out.newLine();
    }

    out.flush();
}

From source file:net.itransformers.utils.MyGraphMLWriter.java

/**
 * Serializes {@code graph} as GraphML to the given writer: key specifications,
 * the graph element (edge default direction, optional description, graph-level
 * data values), then vertex and edge data.
 *
 * Side effect: updates the {@code directed} field from the runtime type of
 * {@code graph}. NOTE(review): {@code w} is wrapped in a BufferedWriter and
 * closed at the end, so the caller's writer is closed by this method — confirm
 * callers expect that.
 *
 * @param graph the graph to serialize
 * @param w destination writer (flushed and closed on success)
 * @throws IOException if writing fails
 */
public void save(Hypergraph<V, E> graph, Writer w) throws IOException {
    BufferedWriter bw = new BufferedWriter(w);

    // write out boilerplate header
    bw.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
    bw.write("<graphml xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">  \n");

    // write out data specifiers, including defaults
    for (String key : graph_data.keySet())
        writeKeySpecification(key, "graph", graph_data.get(key), bw);
    for (String key : vertex_data.keySet())
        writeKeySpecification(key, "node", vertex_data.get(key), bw);
    for (String key : edge_data.keySet())
        writeKeySpecification(key, "edge", edge_data.get(key), bw);

    // write out graph-level information
    // set edge default direction (undirected only when the graph is an UndirectedGraph)
    bw.write("<graph edgedefault=\"");
    directed = !(graph instanceof UndirectedGraph);
    if (directed)
        bw.write("directed\">\n");
    else
        bw.write("undirected\">\n");

    // write graph description, if any
    // NOTE(review): desc and data values are written unescaped — confirm they never
    // contain XML special characters.
    String desc = graph_desc.transform(graph);
    if (desc != null)
        bw.write("<desc>" + desc + "</desc>\n");

    // write graph data out if any
    for (String key : graph_data.keySet()) {
        Transformer<Hypergraph<V, E>, ?> t = graph_data.get(key).transformer;
        Object value = t.transform(graph);
        if (value != null)
            bw.write(format("data", "key", key, value.toString()) + "\n");
    }

    // write vertex information
    writeVertexData(graph, bw);

    // write edge information
    writeEdgeData(graph, bw);

    // close graph
    bw.write("</graph>\n");
    bw.write("</graphml>\n");
    bw.flush();

    bw.close();
}

From source file:org.matsim.analysis.IterationStopWatch.java

/**
 * Writes the gathered iteration timing data tab-separated into a text file:
 * one header row, then one row per iteration with the raw identifier times,
 * a blank separator column, and the computed durations of each operation.
 *
 * @param filename base name of the output file; ".txt" is appended
 */
public void writeTextFile(final String filename) {

    // try-with-resources closes the writer even when a write throws
    // (the original leaked it before the RuntimeException escaped).
    try (BufferedWriter writer = IOUtils.getBufferedWriter(filename + ".txt")) {

        // print header
        writer.write("Iteration");
        for (String identifier : this.identifiers) {
            writer.write('\t');
            writer.write(identifier);
        }
        writer.write('\t');
        for (String identifier : this.operations) {
            writer.write('\t');
            writer.write(identifier);
        }
        writer.newLine();

        // print data
        for (Map.Entry<Integer, Map<String, Long>> entry : this.iterations.entrySet()) {
            Integer iteration = entry.getKey();
            Map<String, Long> data = entry.getValue();
            // iteration
            writer.write(iteration.toString());
            // identifiers
            for (String identifier : this.identifiers) {
                Long time = data.get(identifier);
                writer.write('\t');
                writer.write(formatMilliTime(time));
            }
            // blank separator
            writer.write('\t');
            // durations of operations, derived from the BEGIN/END timestamps (in seconds)
            for (String identifier : this.operations) {
                Long startTime = data.get("BEGIN " + identifier);
                Long endTime = data.get("END " + identifier);
                writer.write('\t');
                if (startTime != null && endTime != null) {
                    double diff = (endTime.longValue() - startTime.longValue()) / 1000.0;
                    writer.write(Time.writeTime(diff));
                }
            }

            // finish
            writer.newLine();
        }
        writer.flush();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

From source file:gov.nih.nci.ncicb.tcga.dcc.dam.processors.FilePackager.java

/**
 * Writes a README (explaining MAF-file column peculiarities) into a uniquely
 * named temporary file, but only when at least one of the selected files is a
 * MAF file.
 *
 * @return the path of the temporary README file, or null when no MAF file is selected
 * @throws IOException if the temp directory is not configured or writing fails
 */
private String writeDCCReadMe() throws IOException {
    // primitive boolean instead of boxed Boolean — no null state is needed here
    boolean mafFileFound = false;
    for (final DataFile df : filePackagerBean.getSelectedFiles()) {
        if (df.isMafFile()) {
            mafFileFound = true;
            break;
        }
    }
    if (!mafFileFound) {
        return null;
    }

    final String tempDir = getTempfileDirectory();
    if (tempDir == null || tempDir.length() == 0) {
        throw new IOException("FilePackagerFactory.tempfileDirectory is null");
    }
    final String readmeFileTempName = tempDir + ConstantValues.SEPARATOR + UUID.randomUUID();
    BufferedWriter writer = null;
    try {
        writer = new BufferedWriter(new FileWriter(readmeFileTempName));
        writer.write("README for Data Matrix generated archive which contains maf files.\n\n"
                + "TCGA MAF files must contain 34 defined columns, however centers are free to add additional columns. Since MAF files created using the Data Matrix contain only the 34 required columns and do not contain any of the additional columns,"
                + " this can result in the appearance of duplicate rows. If your MAF file appears to have duplicate rows, check the original MAF file and see if there are additional columns of information.\n"
                + "In addition, MAF files generated through the DAM will include new columns indicating the MAF file the entry came from, the archive name and the row number of the entry in the MAF file (if known).");
        writer.flush();
        return readmeFileTempName;
    } finally {
        if (writer != null) {
            writer.close();
        }
    }
}