Example usage for java.io BufferedWriter append

List of usage examples for java.io BufferedWriter append

Introduction

On this page you can find example usages of java.io BufferedWriter append.

Prototype

public Writer append(CharSequence csq) throws IOException 

Document

Appends the specified character sequence to this writer.
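
Because append(CharSequence) returns the writer itself, calls can be chained. Below is a minimal, hypothetical sketch of that pattern (the file name and values are illustrative assumptions, not taken from the examples that follow):

import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;

public class BufferedWriterAppendExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical output file used only for illustration.
        try (BufferedWriter writer = Files.newBufferedWriter(Paths.get("example.csv"))) {
            // append(CharSequence) returns the Writer, so calls can be chained.
            writer.append("\"first\"").append(",").append("\"second\"");
            writer.newLine();
        }
    }
}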

Usage

From source file:br.bireme.ngrams.NGrams.java

public static void export(NGIndex index, final NGSchema schema, final String outFile,
        final String outFileEncoding) throws IOException {
    if (index == null) {
        throw new NullPointerException("index");
    }
    if (schema == null) {
        throw new NullPointerException("schema");
    }
    if (outFile == null) {
        throw new NullPointerException("outFile");
    }
    if (outFileEncoding == null) {
        throw new NullPointerException("outFileEncoding");
    }
    final Parameters parameters = schema.getParameters();
    final TreeMap<Integer, String> fields = new TreeMap<>();
    final IndexReader reader = index.getIndexSearcher().getIndexReader();
    final int maxdoc = reader.maxDoc();
    //final Bits liveDocs = MultiFields.getLiveDocs(reader);
    final Bits liveDocs = MultiBits.getLiveDocs(reader);
    final BufferedWriter writer = Files.newBufferedWriter(Paths.get(outFile), Charset.forName(outFileEncoding),
            StandardOpenOption.CREATE, StandardOpenOption.WRITE);

    boolean first = true;

    for (Map.Entry<Integer, br.bireme.ngrams.Field> entry : parameters.sfields.entrySet()) {
        fields.put(entry.getKey(), entry.getValue().name + NOT_NORMALIZED_FLD);
    }

    for (int docID = 0; docID < maxdoc; docID++) {
        if ((liveDocs != null) && (!liveDocs.get(docID)))
            continue;
        final Document doc = reader.document(docID);

        if (first) {
            first = false;
        } else {
            writer.newLine();
        }
        writer.append(doc2pipe(doc, fields));
    }
    writer.close();
    reader.close();
}

From source file:de.dfki.km.perspecting.obie.experiments.RelevanceRatingExperiment.java

@Test
public void avgCorrelations() throws Exception {

    String[] corpora = new String[] { "wikinews", "wikipedia", "gutenberg", "bbc_music", "bbc_nature" };

    for (String corpus : corpora) {

        final String path = "/home/adrian/Dokumente/diss/scoobie/results/dbpedia_en2/correlation/" + corpus
                + "/";
        // final String type = "pearson.csv";
        final String[] types = new String[] { "pearson.csv", "spearman.csv" };

        for (final String type : types) {

            File folder = new File(path);
            final File[] files = folder.listFiles(new FileFilter() {

                @Override
                public boolean accept(File pathname) {
                    return pathname.getName().endsWith(type);
                }
            });

            DoubleMatrix2D m = new DenseDoubleMatrix2D(9, 9);
            m.assign(new DoubleFunction() {
                @Override
                public double apply(double arg0) {
                    return 0;
                }
            });

            for (File file : files) {
                BufferedReader br = new BufferedReader(new FileReader(file));
                br.readLine();
                int row = 0;

                try {
                    for (String line = br.readLine(); line != null; line = br.readLine()) {
                        String[] items = line.split("\t");

                        for (int col = 0; col < 9; col++) {
                            double d = Double.parseDouble(items[col]);
                            if (Double.isNaN(d))
                                d = 0.0;
                            if (d < 0.001)
                                d = 0.0;
                            if (d > 1)
                                d = 1;
                            m.set(row, col, m.get(row, col) + d);
                        }

                        row++;
                        if (row == 9)
                            break;
                    }
                } catch (Exception e) {
                    throw new Exception(file.getName(), e);
                }
                br.close();
                // System.out.println(m);
            }

            final double count = m.get(0, 0);

            m.assign(new DoubleFunction() {
                @Override
                public double apply(double arg0) {
                    return arg0 / count;
                }
            });

            BufferedWriter w = new BufferedWriter(new FileWriter(
                    "/home/adrian/Dokumente/diss/scoobie/results/heatmap." + corpus + "." + type + ".gnup"));
            w.append("set terminal svg size 600,600 dynamic enhanced fname 'times'  fsize 12 butt solid\n");
            w.append("set output 'heatmaps." + corpus + "." + type + ".svg'\n");
            w.append("unset key\n");
            w.append("set view map\n");
            w.append("set style data linespoints\n");
            w.append("set xtics border in scale 0,0 mirror norotate  offset character 0, 0, 0\n");
            w.append("set ytics border in scale 0,0 mirror norotate  offset character 0, 0, 0\n");
            w.append("set xrange [ -0.500000 : 8.50000 ] noreverse nowriteback\n");
            w.append("set yrange [ -0.500000 : 8.50000 ] reverse nowriteback\n");
            w.append("set palette rgbformulae 2, -7, -7\n");
            w.append("splot '-' matrix with image\n");
            Formatter f = new Formatter();
            f.setFormat("%1.1f");
            f.setColumnSeparator(" ");
            w.append("#");
            w.append(f.toString(m));
            w.close();

            // FileUtils.writeStringToFile(new
            // File("/home/adrian/Dokumente/diss/scoobie/results/bbc_wildlife/correlation/"
            // + doc[doc.length - 1] + ".pearson.csv"),
            // f.toString(matrix.pearsonCorrelationDoubleMatrix()),
            // "utf-8");
        }
    }
}

From source file:org.forgerock.openidm.audit.impl.CSVAuditLogger.java

private void writeEntry(BufferedWriter fileWriter, String type, File auditFile, Map<String, Object> obj,
        Collection<String> fieldOrder) throws IOException {

    String key = null;
    Iterator<String> iter = fieldOrder.iterator();
    while (iter.hasNext()) {
        key = iter.next();
        Object value = obj.get(key);
        fileWriter.append("\"");
        if (value != null) {
            if (value instanceof Map) {
                value = new JsonValue((Map) value).toString();
            }
            String rawStr = value.toString();
            // Escape quotes with double quotes
            String escapedStr = rawStr.replaceAll("\"", "\"\"");
            fileWriter.append(escapedStr);
        }
        fileWriter.append("\"");
        if (iter.hasNext()) {
            fileWriter.append(",");
        }
    }
    fileWriter.append(recordDelim);
}

From source file:org.talend.dataprofiler.core.migration.impl.MergeMetadataTask.java

/**
 * DOC Use replace method to migrate from 400 to 410.
 *
 * @param migFolder the folder containing the files to migrate
 * @param acceptFileExtentionNames the file extensions accepted for migration
 * @param replaceStringMap map of strings to find and their replacements
 * @return true if the migration completed without error, false otherwise
 */
private boolean migrateFolder(File migFolder, final String[] acceptFileExtentionNames,
        Map<String, String> replaceStringMap) {

    ArrayList<File> fileList = new ArrayList<File>();
    getAllFilesFromFolder(migFolder, fileList, new FilenameFilter() {

        public boolean accept(File dir, String name) {
            for (String extName : acceptFileExtentionNames) {
                if (name.endsWith(extName)) {
                    return true;
                }
            }
            return false;
        }
    });
    log.info("-------------- Migrating " + fileList.size() + " files"); //$NON-NLS-1$ //$NON-NLS-2$

    int counter = 0;
    int errorCounter = 0;
    Throwable error = null;

    for (File sample : fileList) {
        log.info("-------------- Migrating (" + counter++ + ") : " + sample.getAbsolutePath()); //$NON-NLS-1$ //$NON-NLS-2$
        try {
            BufferedReader fileReader = new BufferedReader(new FileReader(sample));
            BufferedWriter fileWriter = new BufferedWriter(
                    new FileWriter(new File(sample.getAbsolutePath() + MIGRATION_FILE_EXT)));

            while (fileReader.ready()) {
                String line = fileReader.readLine();
                for (String key : replaceStringMap.keySet()) {
                    line = line.replaceAll(key, replaceStringMap.get(key));
                }
                fileWriter.append(line);
                fileWriter.newLine();
            }

            fileWriter.flush();
            fileWriter.close();
            fileWriter = null;
            fileReader.close();
            fileReader = null;
            // Explicitly release the reader/writer references and request garbage collection;
            // otherwise the file handles may not be freed and a warning that the file is still
            // in use can appear when it is deleted or renamed later.
            System.gc();
        } catch (Exception e) {
            error = e;
            errorCounter++;
            log.error("!!!!!!!!!!!  Error transforming (" + sample.getAbsolutePath() + ")\n" + e.getMessage(), //$NON-NLS-1$//$NON-NLS-2$
                    e);
        }
        log.info("-------------- Migration done of " + counter + " files" //$NON-NLS-1$ //$NON-NLS-2$
                + (errorCounter != 0 ? (",  there are " + errorCounter + " files in error.") : ".")); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
    }

    if (error != null) {
        return false;
    } else {
        // remove original files and rename new ones to old ones
        for (File sample : fileList) {
            boolean isDeleted = sample.delete();
            log.info(sample.getAbsolutePath() + (isDeleted ? " is deleted." : " failed to delete.")); //$NON-NLS-1$ //$NON-NLS-2$
            boolean isrenamed = new File(sample.getAbsolutePath() + MIGRATION_FILE_EXT).renameTo(sample);
            log.info(sample.getAbsolutePath() + MIGRATION_FILE_EXT
                    + (isrenamed ? " is renamed." : " failed to rename.")); //$NON-NLS-1$ //$NON-NLS-2$
        }
    }

    return true;
}

From source file:org.caboclo.util.Credentials.java

/**
 * Save cloud service credentials as an encrypted string of characters
 *
 * @param currentServer The cloud storage provider for which we want to save
 * the credentials
 * @param cred The plain-text credentials that will be saved on file
 */
public void saveCredentials(String currentServer, String cred) {
    StringBuilder strPath = new StringBuilder();
    strPath.append(System.getProperty("user.home")).append(java.io.File.separator).append("backupcredentials");

    BufferedWriter strout = null;

    try {
        //Create file if it does not exist
        File credentialsFile = new File(strPath.toString());
        if (!credentialsFile.exists()) {
            credentialsFile.createNewFile();
        }
        byte[] cypherCred = encryptCredentials(cred);
        strout = new BufferedWriter(new FileWriter(credentialsFile, true));
        String encodedCred = new String(new Base64().encode(cypherCred));
        String line = currentServer + ":" + encodedCred;
        strout.append(line);
        strout.newLine();
    } catch (IOException ex) {
        Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, null, ex);
    } catch (IllegalBlockSizeException ex) {
        Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, null, ex);
    } catch (BadPaddingException ex) {
        Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, null, ex);
    } catch (InvalidKeyException ex) {
        Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, null, ex);
    } catch (NoSuchPaddingException ex) {
        Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, null, ex);
    } catch (NoSuchAlgorithmException ex) {
        Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, null, ex);
    } finally {
        try {
            if (strout != null) {
                strout.flush();
                strout.close();
            }
        } catch (IOException ex) {
            Logger.getLogger(this.getClass().getName()).log(Level.SEVERE, null, ex);
        }
    }
}

From source file:MSUmpire.LCMSPeakStructure.LCMSPeakDIAMS2.java

private void PrepareMGF_MS1Cluster(LCMSPeakMS1 ms1lcms) throws IOException {

    ArrayList<PseudoMSMSProcessing> ScanList = new ArrayList<>();
    ExecutorService executorPool = Executors.newFixedThreadPool(NoCPUs);
    for (PeakCluster ms1cluster : ms1lcms.PeakClusters) {
        final ArrayList<PrecursorFragmentPairEdge> frags = FragmentsClu2Cur.get(ms1cluster.Index);
        if (frags != null && DIA_MZ_Range.getX() <= ms1cluster.GetMaxMz()
                && DIA_MZ_Range.getY() >= ms1cluster.TargetMz()) {
            //            if (DIA_MZ_Range.getX() <= ms1cluster.GetMaxMz() && DIA_MZ_Range.getY() >= ms1cluster.TargetMz() && FragmentsClu2Cur.containsKey(ms1cluster.Index)) {
            //                ArrayList<PrecursorFragmentPairEdge> frags = FragmentsClu2Cur.get(ms1cluster.Index);
            ms1cluster.GroupedFragmentPeaks.addAll(frags);
            if (Last_MZ_Range == null || Last_MZ_Range.getY() < ms1cluster.TargetMz()) {
                PseudoMSMSProcessing mSMSProcessing = new PseudoMSMSProcessing(ms1cluster, parameter);
                ScanList.add(mSMSProcessing);
            }
        }
    }

    for (PseudoMSMSProcessing proc : ScanList) {
        executorPool.execute(proc);
    }
    executorPool.shutdown();

    try {
        executorPool.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    } catch (InterruptedException e) {
        Logger.getRootLogger().info("interrupted..");
    }

    String mgffile = FilenameUtils.getFullPath(ParentmzXMLName) + GetQ1Name() + ".mgf.temp";
    String mgffile2 = FilenameUtils.getFullPath(ParentmzXMLName) + GetQ2Name() + ".mgf.temp";
    //        FileWriter mapwriter = new FileWriter(FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName) + ".ScanClusterMapping_Q1", true);
    //        FileWriter mapwriter2 = new FileWriter(FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName) + ".ScanClusterMapping_Q2", true);

    //        FileWriter mgfWriter = new FileWriter(mgffile, true);
    //        FileWriter mgfWriter2 = new FileWriter(mgffile2, true);
    final BufferedWriter mapwriter = DIAPack.get_file(DIAPack.OutputFile.ScanClusterMapping_Q1,
            FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName)
                    + ".ScanClusterMapping_Q1"),
            mapwriter2 = DIAPack.get_file(DIAPack.OutputFile.ScanClusterMapping_Q2,
                    FilenameUtils.getFullPath(ParentmzXMLName) + FilenameUtils.getBaseName(ParentmzXMLName)
                            + ".ScanClusterMapping_Q2"),
            mgfWriter = DIAPack.get_file(DIAPack.OutputFile.Mgf_Q1, mgffile),
            mgfWriter2 = DIAPack.get_file(DIAPack.OutputFile.Mgf_Q2, mgffile2);

    for (PseudoMSMSProcessing mSMSProcessing : ScanList) {
        if (MatchedFragmentMap.size() > 0) {
            mSMSProcessing.RemoveMatchedFrag(MatchedFragmentMap);
        }

        XYPointCollection Scan = mSMSProcessing.GetScan();

        if (Scan != null && Scan.PointCount() > parameter.MinFrag) {
            //                StringBuilder mgfString = new StringBuilder();

            if (mSMSProcessing.Precursorcluster.IsotopeComplete(3)) {
                final BufferedWriter mgfString = mgfWriter;
                parentDIA.Q1Scan++;
                mgfString.append("BEGIN IONS\n");
                mgfString.append("PEPMASS=").append(String.valueOf(mSMSProcessing.Precursorcluster.TargetMz()))
                        .append("\n");
                mgfString.append("CHARGE=").append(String.valueOf(mSMSProcessing.Precursorcluster.Charge))
                        .append("+\n");
                mgfString.append("RTINSECONDS=")
                        .append(String.valueOf(mSMSProcessing.Precursorcluster.PeakHeightRT[0] * 60f))
                        .append("\n");
                mgfString.append("TITLE=").append(GetQ1Name()).append(".")
                        .append(String.valueOf(parentDIA.Q1Scan)).append(".")
                        .append(String.valueOf(parentDIA.Q1Scan)).append(".")
                        .append(String.valueOf(mSMSProcessing.Precursorcluster.Charge)).append("\n");
                for (int i = 0; i < Scan.PointCount(); i++) {
                    mgfString.append(String.valueOf(Scan.Data.get(i).getX())).append(" ")
                            .append(String.valueOf(Scan.Data.get(i).getY())).append("\n");
                }
                mgfString.append("END IONS\n\n");
                mapwriter.write(parentDIA.Q1Scan + "_" + mSMSProcessing.Precursorcluster.Index + "\n");
                //                    mgfWriter.write(mgfString.toString());
                //} else if (mSMSProcessing.Precursorcluster.IsotopeComplete(2)) {
            } else {
                final BufferedWriter mgfString = mgfWriter2;
                parentDIA.Q2Scan++;
                mgfString.append("BEGIN IONS\n");
                mgfString.append("PEPMASS=").append(String.valueOf(mSMSProcessing.Precursorcluster.TargetMz()))
                        .append("\n");
                mgfString.append("CHARGE=").append(String.valueOf(mSMSProcessing.Precursorcluster.Charge))
                        .append("+\n");
                mgfString.append("RTINSECONDS=")
                        .append(String.valueOf(mSMSProcessing.Precursorcluster.PeakHeightRT[0] * 60f))
                        .append("\n");
                mgfString.append("TITLE=").append(GetQ2Name()).append(".")
                        .append(String.valueOf(parentDIA.Q2Scan)).append(".")
                        .append(String.valueOf(parentDIA.Q2Scan)).append(".")
                        .append(String.valueOf(mSMSProcessing.Precursorcluster.Charge)).append("\n");
                for (int i = 0; i < Scan.PointCount(); i++) {
                    mgfString.append(String.valueOf(Scan.Data.get(i).getX())).append(" ")
                            .append(String.valueOf(Scan.Data.get(i).getY())).append("\n");
                }
                mgfString.append("END IONS\n\n");
                mapwriter2.write(parentDIA.Q2Scan + "_" + mSMSProcessing.Precursorcluster.Index + "\n");
                //                    mgfWriter2.write(mgfString.toString());
            }
        }
        mSMSProcessing.Precursorcluster.GroupedFragmentPeaks.clear();
    }
    //        mgfWriter2.close();
    //        mgfWriter.close();
    //        mapwriter.close();
    //        mapwriter2.close();
}

From source file:com.oneops.boo.BooConfigInterpolator.java

private String readFileToString(File inputFile) throws IOException {
    BufferedReader bufrdr = new BufferedReader(new FileReader(inputFile));
    StringWriter strWr = new StringWriter();
    BufferedWriter wr = new BufferedWriter(strWr);

    try {
        char[] copyBuffer = new char[1024];
        int readChars;

        do {
            readChars = bufrdr.read(copyBuffer, 0, 1024);
            if (readChars != -1) {
                boolean addNewLine = false;

                for (int charIndex = 0; charIndex < readChars; charIndex++) {
                    char thisChar = copyBuffer[charIndex];
                    if (thisChar == '\n') {
                        // Append the newline to the output. Add
                        // another newline before the next character.
                        wr.append('\n');
                        addNewLine = true;
                    } else {
                        if (addNewLine) {
                            wr.append('\n');
                            addNewLine = false;
                        }
                        wr.append(thisChar);
                    }
                }
            }
        } while (readChars != -1);
    } finally {
        bufrdr.close();
        wr.close();
    }

    return strWr.toString();
}

From source file:org.ihtsdo.statistics.Processor.java

/**
 * Prints the report.
 *
 * @param bw the bw
 * @param tableMap the table map
 * @throws Exception the exception
 */
private void printReport(BufferedWriter bw, OutputFileTableMap tableMap) throws Exception {

    SQLStatementExecutor executor = new SQLStatementExecutor(connection);

    for (SelectTableMap select : tableMap.getSelect()) {
        String query = "Select * from " + select.getTableName();
        if (executor.executeQuery(query, null)) {
            ResultSet rs = executor.getResultSet();

            if (rs != null) {
                ResultSetMetaData meta = rs.getMetaData();
                while (rs.next()) {
                    for (int i = 0; i < meta.getColumnCount(); i++) {
                        bw.append(rs.getObject(i + 1).toString());
                        if (i + 1 < meta.getColumnCount()) {
                            bw.append(",");
                        } else {
                            bw.append("\r\n");
                        }
                    }
                }

                meta = null;
                rs.close();
            }
        }
    }
    executor = null;
}

From source file:tds.websim.presentation.services.WebSimXHR.java

private static void writeCsvReportToResponse(SimReport report, HttpServletResponse response, String fileName)
        throws IOException {
    response.setContentType("text/csv");
    response.addHeader("Content-Disposition", "attachment;filename=" + fileName);

    // TODO: Shiva, what's the encoding for the CSV report output?
    BufferedWriter responseWriter = new BufferedWriter(
            new OutputStreamWriter(response.getOutputStream(), "UTF-8"));

    for (Table reportTable : report.getTables()) {
        StringBuilder sb = new StringBuilder();

        for (String tableHeader : reportTable.getTableHeaders()) {
            sb.append(tableHeader);
            sb.append(",");
        }
        responseWriter.append(sb.toString());
        responseWriter.newLine();

        for (TableRow tableRow : reportTable.getTableRows()) {
            sb = new StringBuilder();
            for (String colVal : tableRow.getColVals()) {
                sb.append(colVal);
                sb.append(",");
            }
            responseWriter.append(sb.toString());
            responseWriter.newLine();
        }
        responseWriter.newLine();
    }

    responseWriter.flush();
    responseWriter.close();
}