Example usage for java.io Writer flush

List of usage examples for java.io Writer flush

Introduction

On this page you can find example usages of java.io.Writer.flush.

Prototype

public abstract void flush() throws IOException;

Source Link

Document

Flushes the stream.

Usage

From source file:com.excilys.ebi.gatling.recorder.ui.component.RunningFrame.java

private void saveScenario() {
    // Renders the recorded scenario through Velocity templates, writing one
    // output file per configured result type.
    VelocityEngine ve = new VelocityEngine();
    ve.setProperty("file.resource.loader.class", ClasspathResourceLoader.class.getName());
    ve.init();

    VelocityContext context = new VelocityContext();
    context.put("protocol", protocol);
    context.put("host", host);
    context.put("port", port);
    context.put("urlBase", urlBaseString);
    context.put("proxy", configuration.getProxy());
    context.put("urls", urls);
    context.put("headers", headers);
    context.put("name", "Scenario name");

    if (listEvents.size() > EVENTS_GROUPING) {
        // Large recordings are split into chains of at most EVENTS_GROUPING
        // events so the generated scenario stays manageable.
        List<List<Object>> subListsEvents = new ArrayList<List<Object>>();
        // Ceiling division: the previous "size / EVENTS_GROUPING + 1" produced
        // an extra, empty sub-list whenever size was an exact multiple.
        int numberOfSubLists = (listEvents.size() + EVENTS_GROUPING - 1) / EVENTS_GROUPING;
        for (int i = 0; i < numberOfSubLists; i++)
            // subList's upper bound is exclusive, so it must be size(), not
            // size() - 1 — the old bound silently dropped the last event.
            subListsEvents.add(listEvents.subList(EVENTS_GROUPING * i,
                    Math.min(EVENTS_GROUPING * (i + 1), listEvents.size())));

        context.put("chainEvents", subListsEvents);
        context.put("events", new ArrayList<Object>());
    } else {
        context.put("events", listEvents);
        context.put("chainEvents", new ArrayList<List<Object>>());
    }

    context.put("package", Configuration.getInstance().getIdePackage());
    context.put("date", ResultType.FORMAT.format(startDate));
    URI uri = URI.create("");
    context.put("URI", uri);

    Template template = null;
    Writer writer = null;
    for (ResultType resultType : configuration.getResultTypes()) {
        try {
            template = ve.getTemplate(resultType.getTemplate());
            writer = new OutputStreamWriter(
                    new FileOutputStream(
                            new File(getOutputFolder(), resultType.getScenarioFileName(startDate))),
                    configuration.getEncoding());
            template.merge(context, writer);
            writer.flush();

        } catch (IOException e) {
            // Pass the throwable to the logger: concatenating e.getStackTrace()
            // only printed the array's toString(), not the actual stack trace.
            logger.error("Error, while saving '" + resultType + "' scenario...", e);

        } finally {
            closeQuietly(writer);
        }
    }
}

From source file:com.mirth.connect.donkey.test.util.TestUtils.java

/**
 * Builds a human-readable performance summary for a test run and, when
 * PERFORMANCE_LOG_FILE is set, mirrors the raw per-message times to that file.
 *
 * NOTE: sorts the caller's {@code times} list in place (pre-existing behavior,
 * kept for compatibility) — pass a copy if the original order matters.
 *
 * @param numDestinations number of destinations each message was sent to
 * @param milliseconds    total wall-clock duration of the test
 * @param times           individual send times, one entry per message
 * @return formatted, multi-line summary text
 * @throws IOException if writing the performance log file fails
 */
public static String getPerformanceText(int numDestinations, long milliseconds, List<Long> times)
        throws IOException {
    double seconds = ((double) milliseconds) / 1000d;
    double speed = ((double) times.size()) / seconds;

    long sum = 0;
    Writer writer = null;

    try {
        if (PERFORMANCE_LOG_FILE != null) {
            writer = new BufferedWriter(new FileWriter(new File(PERFORMANCE_LOG_FILE)));
        }

        for (Long time : times) {
            sum += time;

            if (writer != null) {
                writer.append(time + "\n");
            }
        }

        if (writer != null) {
            writer.flush();
        }
    } finally {
        // The original flushed but never closed the writer, leaking the file
        // handle; always release it, even if appending threw.
        if (writer != null) {
            try {
                writer.close();
            } catch (IOException ignored) {
                // best-effort close of an auxiliary log file
            }
        }
    }

    Collections.sort(times);

    StringBuilder stringBuilder = new StringBuilder();
    final int padding = 17;

    stringBuilder.append(StringUtils.rightPad("Messages Sent:", padding) + times.size() + "\n");
    stringBuilder.append(
            StringUtils.rightPad("Inbound:", padding) + Precision.round(speed, 2) + " messages/second\n");
    stringBuilder.append(StringUtils.rightPad("Outbound:", padding)
            + Precision.round(speed * numDestinations, 2) + " messages/second\n");

    if (times.size() > 0) {
        stringBuilder.append(StringUtils.rightPad("Lowest Time:", padding) + times.get(0) + "ms\n");
        stringBuilder
                .append(StringUtils.rightPad("Highest Time:", padding) + times.get(times.size() - 1) + "ms\n");
        stringBuilder
                .append(StringUtils.rightPad("Median Time:", padding) + times.get(times.size() / 2) + "ms\n");
        stringBuilder.append(StringUtils.rightPad("Average Time:", padding)
                + Precision.round((double) sum / (double) times.size(), 2) + "ms\n");
        stringBuilder.append(StringUtils.rightPad("Total Send Time:", padding) + sum + "ms\n");
        stringBuilder.append(StringUtils.rightPad("Total Test Time:", padding) + milliseconds + "ms\n");
    }

    return stringBuilder.toString();
}

From source file:org.ala.util.CsvMerger.java

/**
 * Joins two CSV files on the given key columns and writes the merged rows to
 * a file in {@code outputDir}. The first row of each input is treated as a
 * header; data rows of file 1 whose key is null/empty are skipped.
 *
 * @param keyIdx1 zero-based join-column index in the first file
 * @param keyIdx2 zero-based join-column index in the second file
 */
private void mergeCsv(String input1, String input2, String outputDir, int keyIdx1, int keyIdx2)
        throws Exception {
    List<String[]> fieldsList1 = new ArrayList<String[]>();
    List<String[]> fieldsList2 = new ArrayList<String[]>();

    System.out.println("Merging...");

    Writer writer = getWriter(outputDir);

    CSVReader r1 = null;
    CSVReader r2 = null;
    try {
        r1 = getCsvReader(input1);
        r2 = getCsvReader(input2);

        // Emit the merged header line first.
        String[] fields1 = r1.readNext();
        String[] fields2 = r2.readNext();
        if (fields1 != null && fields2 != null) {
            writeCsv(fields1, fields2, keyIdx2, writer);
        }

        // Buffer the remaining data rows of both inputs.
        while ((fields1 = r1.readNext()) != null) {
            fieldsList1.add(fields1);
        }
        while ((fields2 = r2.readNext()) != null) {
            fieldsList2.add(fields2);
        }

        // Nested-loop join: one merged row per (fl1, fl2) pair with equal keys.
        for (String[] fl1 : fieldsList1) {
            String key1 = fl1[keyIdx1];

            if (key1 != null && !"".equals(key1)) {
                for (String[] fl2 : fieldsList2) {
                    if (key1.equals(fl2[keyIdx2])) {
                        writeCsv(fl1, fl2, keyIdx2, writer);
                    }
                }
            }
        }
        writer.flush();
    } finally {
        // The original leaked both readers (and the writer on any exception);
        // close everything regardless of how we exit.
        if (r1 != null) {
            try { r1.close(); } catch (Exception ignored) { /* best effort */ }
        }
        if (r2 != null) {
            try { r2.close(); } catch (Exception ignored) { /* best effort */ }
        }
        writer.close();
    }
}

From source file:gov.nih.nci.ncicb.tcga.dcc.dam.dao.DAMQueriesCGCCLevelTwoAndThree.java

/**
 * Given a list of fileinfo objects, generates each selected file in a
 * temporary location and records the generated path on the object
 * (via {@code file.setPath(...)}). Returns nothing.
 *
 * @param selectedFiles the list of selected data files
 * @throws DataAccessMatrixQueries.DAMQueriesException if the temp directory is
 *         unset or missing, or if generating any file fails
 */
public void addPathsToSelectedFiles(final List<DataFile> selectedFiles)
        throws DataAccessMatrixQueries.DAMQueriesException {
    if (tempfileDirectory == null) {
        throw new DataAccessMatrixQueries.DAMQueriesException("No tempfileDirectory specified");
    }
    if (!(new File(tempfileDirectory)).exists()) {
        throw new DataAccessMatrixQueries.DAMQueriesException("Directory does not exist " + tempfileDirectory);
    }
    for (final DataFile df : selectedFiles) { //for each file selected from the tree
        if (shouldGenerateFile(df)) {
            DiseaseContextHolder.setDisease(df.getDiseaseType());
            final DataFileLevelTwoThree file = (DataFileLevelTwoThree) df;

            // the temporary file to write to.
            // the bean expects this to be set before leaving this method.
            final String uniqueName = getUniqueFilename(file);
            final String path = tempfileDirectory + File.separator + uniqueName;
            file.setPath(path);
            long starttime_writer;
            long endtime_writer;
            // writerPointer tracks whether the writer still needs closing in the
            // finally block; it is nulled out once the happy-path close succeeds.
            Writer writerPointer = null; // other writer reference needs to be final for inner class
            Writer writer = null;
            try {
                starttime_writer = System.currentTimeMillis();
                logger.logToLogger(Level.DEBUG,
                        "Level" + getDataLevel() + ": Creating writer at " + starttime_writer);
                //noinspection IOResourceOpenedButNotSafelyClosed
                writer = new BufferedWriter(new FileWriter(path), SIXTY_FOUR_MEGS);
                writerPointer = writer;
                generateFile((DataFileLevelTwoThree) df, writer);
                writer.flush();
                writer.close();
                // Closed successfully: signal the finally block that no
                // cleanup is needed.
                writerPointer = null;
                endtime_writer = System.currentTimeMillis();
                logger.logToLogger(Level.DEBUG,
                        "Level" + getDataLevel() + ": Closed writer at " + endtime_writer);
                logger.logToLogger(Level.DEBUG,
                        "Level" + getDataLevel() + ": Total millisecs during which writer was alive: "
                                + (endtime_writer - starttime_writer));

            } catch (IOException e) {
                new ErrorInfo(e); //logs itself
                throw new DAMQueriesException(e);
            } catch (DataAccessException e) {
                new ErrorInfo(e);
                throw new DAMQueriesException(e);
            } finally {
                if (writerPointer != null) {
                    //will only happen if some exception was thrown, in which case we don't care
                    //whether the buffer has been flushed - just make sure the stream is closed
                    try {
                        writerPointer.close();
                    } catch (IOException e) {
                        logger.logToLogger(Level.WARN, "Could not close writer: was already closed.");
                    }
                }
                // NOTE(review): writer == writerPointer whenever the latter is
                // non-null, so this second close looks redundant but presumably
                // harmless — closeQuietly swallows any exception.
                IOUtils.closeQuietly(writer);
            }
        }
    }
}

From source file:ch.randelshofer.cubetwister.HTMLExporter.java

/**
 * Processes a cube template./*w ww.j  av a2 s  . com*/
 *
 * @param filenameTemplate The template for the filenameTemplate. It must contain
 * the substring ${cube}
 * @param tokens An input stream for reading the contents of the template.
 */
private void processCubeTemplate(String filenameTemplate, String[] tokens) throws IOException {
    String placeholder = "${cube}";
    int plh = filenameTemplate.indexOf(placeholder);
    String filenamePrefix = (plh == 0) ? "" : filenameTemplate.substring(0, plh);
    String filenameSuffix = (plh >= filenameTemplate.length() - placeholder.length()) ? ""
            : filenameTemplate.substring(plh + placeholder.length());

    for (EntityModel node : model.getCubes().getChildren()) {
        p.setProgress(p.getProgress() + 1);
        CubeModel m = (CubeModel) node;
        stack.push(new StackEntry());
        putCubeData(m, "");
        p.setNote("Exporting " + ids.get(m) + filenameSuffix + " ...");
        putNextEntry(filenamePrefix + ids.get(m) + filenameSuffix);
        Writer w = new OutputStreamWriter(entryOut, "UTF8");
        writeData(w, tokens, 0, tokens.length);
        w.flush();
        stack.pop();
        closeEntry();
    }
}

From source file:ch.randelshofer.cubetwister.HTMLExporter.java

/**
 * Renders the notation template once per notation in the model.
 *
 * @param filenameTemplate the output filename pattern; must contain
 * the substring ${notation}
 * @param tokens the tokenized contents of the template
 */
private void processNotationTemplate(String filenameTemplate, String[] tokens) throws IOException {
    final String placeholder = "${notation}";
    final int at = filenameTemplate.indexOf(placeholder);
    final String prefix = (at == 0) ? "" : filenameTemplate.substring(0, at);
    final String suffix;
    if (at >= filenameTemplate.length() - placeholder.length()) {
        suffix = "";
    } else {
        suffix = filenameTemplate.substring(at + placeholder.length());
    }

    for (EntityModel child : model.getNotations().getChildren()) {
        p.setProgress(p.getProgress() + 1);
        NotationModel notation = (NotationModel) child;
        stack.push(new StackEntry());
        putNotationData(notation, "");
        p.setNote("Exporting " + ids.get(notation) + suffix + " ...");
        putNextEntry(prefix + ids.get(notation) + suffix);
        Writer out = new OutputStreamWriter(entryOut, "UTF8");
        writeData(out, tokens, 0, tokens.length);
        out.flush();
        stack.pop();
        closeEntry();
    }
}

From source file:ch.randelshofer.cubetwister.HTMLExporter.java

/**
 * Renders the script template once per script in the model.
 *
 * @param filename the filename pattern of the template; must contain
 * the substring ${script}
 * @param tokens the tokenized contents of the template
 */
private void processScriptTemplate(String filename, String[] tokens) throws IOException {
    final String placeholder = "${script}";
    final int at = filename.indexOf(placeholder);
    final String prefix = (at == 0) ? "" : filename.substring(0, at);
    final String suffix;
    if (at >= filename.length() - placeholder.length()) {
        suffix = "";
    } else {
        suffix = filename.substring(at + placeholder.length());
    }

    for (EntityModel child : model.getScripts().getChildren()) {
        p.setProgress(p.getProgress() + 1);
        ScriptModel script = (ScriptModel) child;
        stack.push(new StackEntry());
        putScriptData(script);
        p.setNote("Exporting " + ids.get(script) + suffix + " ...");
        putNextEntry(prefix + ids.get(script) + suffix);
        Writer out = new OutputStreamWriter(entryOut, "UTF8");
        writeData(out, tokens, 0, tokens.length);
        out.flush();
        // Note: this variant closes the entry before popping the stack,
        // matching the original statement order.
        closeEntry();
        stack.pop();
    }
}

From source file:ch.randelshofer.cubetwister.HTMLExporter.java

/**
 * Renders the note template once per text entry in the model.
 *
 * @param filename the filename pattern of the template; must contain
 * the substring ${note}
 * @param tokens the tokenized contents of the template
 */
private void processNoteTemplate(String filename, String[] tokens) throws IOException {
    final String placeholder = "${note}";
    final int at = filename.indexOf(placeholder);
    final String prefix = (at == 0) ? "" : filename.substring(0, at);
    final String suffix;
    if (at >= filename.length() - placeholder.length()) {
        suffix = "";
    } else {
        suffix = filename.substring(at + placeholder.length());
    }

    for (EntityModel child : model.getTexts().getChildren()) {
        p.setProgress(p.getProgress() + 1);
        TextModel note = (TextModel) child;
        stack.push(new StackEntry());
        putNoteData(note);
        p.setNote("Exporting " + ids.get(note) + suffix + " ...");
        putNextEntry(prefix + ids.get(note) + suffix);
        Writer out = new OutputStreamWriter(entryOut, "UTF8");
        writeData(out, tokens, 0, tokens.length);
        out.flush();
        // Note: this variant closes the entry before popping the stack,
        // matching the original statement order.
        closeEntry();
        stack.pop();
    }
}

From source file:net.cit.tetrad.resource.SubResource.java

@SuppressWarnings("unchecked")
@RequestMapping("/shardChunkLstCommand.do")
// Returns (as JSON written to the response) one DataTables page of per-collection
// chunk summaries for the given device/database. Page 0 rebuilds the cached
// result list; later pages are served from the chunkResultLst field.
// NOTE(review): chunkResultLst appears to be an instance field shared across
// requests — looks unsafe under concurrent requests; verify the controller's
// scoping before relying on it.
public void shardChunkLstCommand(HttpServletRequest request, HttpServletResponse response) throws Exception {
    int deviceCode = Integer.parseInt(Utility.isNullNumber(request.getParameter("deviceCode")));
    String dbNameParam = request.getParameter("dbNameParam");

    // DataTables paging parameters: start offset and page size.
    int pageNumber = Integer.parseInt(Utility.isNullNumber(request.getParameter("iDisplayStart")));
    int nPerPage = Integer.parseInt(Utility.isNullNumber(request.getParameter("iDisplayLength")));
    log.debug("pageNumber=" + pageNumber + ", nPerPage=" + nPerPage);

    // Debug-dump every request parameter.
    Enumeration parameter = request.getParameterNames();
    log.debug(parameter.toString());
    while (parameter.hasMoreElements()) {
        String pName = (String) parameter.nextElement();
        String pValue = request.getParameter(pName);
        log.debug(pName + " = " + pValue);
    }

    int sEcho = Integer.parseInt(Utility.isNullNumber(request.getParameter(REQ_SECHO)));

    List<Object> resultLst = new ArrayList<Object>();
    // (original comment garbled) Only the first page request (pageNumber == 0)
    // rebuilds chunkResultLst; subsequent pages reuse the cached list.
    if (pageNumber == 0) {
        List<Object> findMongoCollections = cllectionCommand(deviceCode, "collections");
        JSONObject js = new JSONObject();
        for (Object collObj : findMongoCollections) {
            // Collection ids are "<db>.<collection>"; keep only those in the
            // requested database.
            String coll = (String) ((Map<String, Object>) collObj).get("_id");
            String[] collSplit = coll.split("\\.");
            if (dbNameParam.equals(collSplit[0])) {
                List<Object> lst = comandService.chunksGrpCommand(deviceCode, coll, new ArrayList<Object>());
                js = new JSONObject();
                List<Object> chunkInfoGroup = new ArrayList<Object>();
                double totalChunkCnt = 0;
                String collName = null;
                double nChuncks = 0;
                // Sum chunk counts across the grouped results for this collection.
                for (Object lstObj : lst) {
                    DBObject dbo = (DBObject) lstObj;
                    collName = (String) dbo.get("collName");
                    nChuncks = (Double) dbo.get("nChunks");
                    totalChunkCnt += nChuncks;
                    chunkInfoGroup.add(dbo);
                }
                js.put("collName", collName);
                js.put("totalChunkCnt", totalChunkCnt);
                js.put("chunkInfoGroup", chunkInfoGroup);
                resultLst.add(js);
            }
        }
        chunkResultLst = resultLst;
    }

    int chunkResultLstSize = chunkResultLst.size();
    // (original comment garbled) Slice the cached chunkResultLst down to the
    // [pageNumber, pageNumber + nPerPage) window requested by the client.
    int count = pageNumber + nPerPage;
    resultLst = new ArrayList<Object>();
    for (int i = pageNumber; i < count; i++) {
        if (chunkResultLstSize <= i)
            break;
        resultLst.add(chunkResultLst.get(i));
    }

    // Wrap the page in the DataTables envelope (sEcho / totals) and serialize.
    PersonJson result = new PersonJson();
    result.setAaData(resultLst);
    result.setsEcho(sEcho);
    result.setiTotalRecords(chunkResultLstSize);
    result.setiTotalDisplayRecords(chunkResultLstSize);

    JSONObject jsonObject = JSONObject.fromObject(result);

    Writer writer = setResponse(response).getWriter();
    writer.write(jsonObject.toString());

    writer.flush();

    log.debug("end - shardChunkLstCommand()");
}

From source file:au.org.ala.layers.dao.ObjectDAOImpl.java

// Streams all objects for the given field id (fid) to the output as CSV,
// using PostgreSQL's COPY ... TO STDOUT via the driver's CopyManager.
// NOTE(review): fid is interpolated into the SQL text by MessageFormat, not
// bound as a parameter — if fid can ever be user-supplied this is a SQL
// injection risk; confirm it is validated upstream.
// NOTE(review): the OutputStreamWriter uses the platform default charset —
// confirm that matches what callers of this stream expect.
public void writeObjectsToCSV(OutputStream output, String fid) throws Exception {
    String sql = MessageFormat.format("COPY (select o.pid as pid, o.id as id, o.name as name, "
            + "o.desc as description, " + "ST_AsText(ST_Centroid(o.the_geom)) as centroid, "
            + "GeometryType(o.the_geom) as featureType from objects o "
            + "where o.fid = ''{0}'') TO STDOUT WITH CSV HEADER", fid);

    DataSource ds = (DataSource) applicationContext.getBean("dataSource");
    Connection conn = DataSourceUtils.getConnection(ds);

    try {
        // COPY requires the underlying PostgreSQL connection, so unwrap the
        // pooled (C3P0) connection to its native BaseConnection.
        BaseConnection baseConn = (BaseConnection) new C3P0NativeJdbcExtractor().getNativeConnection(conn);
        Writer csvOutput = new OutputStreamWriter(output);
        CopyManager copyManager = new CopyManager(baseConn);
        copyManager.copyOut(sql, csvOutput);
        csvOutput.flush();
        conn.close();
    } catch (SQLException ex) {
        // something has failed and we print a stack trace to analyse the error
        logger.error(ex.getMessage(), ex);
        // ignore failure closing connection
        try {
            conn.close();
        } catch (SQLException e) {
            /* deliberately ignored: close is best-effort here */ }
    } finally {
        // properly release our connection back to the pool (safe even if the
        // connection was already closed above)
        DataSourceUtils.releaseConnection(conn, ds);
    }
}