Example usage for java.io PrintStream PrintStream

List of usage examples for java.io PrintStream PrintStream

Introduction

This page lists example usages of the java.io PrintStream(File) constructor, collected from open-source projects.

Prototype

public PrintStream(File file) throws FileNotFoundException 

Source Link

Document

Creates a new print stream, without automatic line flushing, that writes to the specified file.

Usage

From source file:com.cyberway.issue.io.arc.ARC2WCDX.java

/**
 * Generates a gzipped WCDX (extended CDX) index containing one line per
 * record of the given ARC reader.
 *
 * The index is written next to the ARC as "&lt;arcname&gt;.wcdx.gz.open" and
 * renamed to "&lt;arcname&gt;.wcdx.gz" only after all records were indexed,
 * so a lingering ".open" file signals an interrupted or failed run.
 *
 * @param reader source ARC reader; digest calculation is enabled on it
 * @return two-element array: { final wcdx path (String), records written (Long) }
 */
public static Object[] createWcdx(ARCReader reader) {
    reader.setDigest(true); // required so record.getDigestStr() below is populated

    // Derive the index name from the ARC name; write to ".open" first,
    // rename to the final name only on success.
    String wcdxPath = reader.getReaderIdentifier().replaceAll("\\.arc(\\.gz)?$", ".wcdx.gz");
    File wcdxFile = new File(wcdxPath + ".open");
    PrintStream writer = null;
    long count = 0;
    try {
        writer = new PrintStream(new GZIPOutputStream(new FileOutputStream(wcdxFile)));

        // write header: legend (one token per output column) + timestamp
        StringBuilder legend = new StringBuilder();
        appendField(legend, "CDX");
        appendField(legend, "surt-uri");
        appendField(legend, "b"); // ARC timestamp
        appendField(legend, "http-date");
        appendField(legend, "s"); // status code
        appendField(legend, "m"); // media type
        appendField(legend, "sha1"); // content sha1
        appendField(legend, "g"); // ARC name
        appendField(legend, "V"); // start offset
        appendField(legend, "end-offset"); // TODO: implement
        appendField(legend, "n"); // ARC record length TODO: verify
        appendField(legend, "http-content-length");
        appendField(legend, "http-last-modified");
        appendField(legend, "http-expires");
        appendField(legend, "http-etag");
        appendField(legend, "http-location");
        appendField(legend, "e"); // IP
        appendField(legend, "a"); // original URL
        // WCDX version+creation time: crude version control
        appendField(legend, WCDX_VERSION + "@" + ArchiveUtils.get14DigitDate());
        writer.println(legend.toString());

        Iterator iter = reader.iterator();
        count = 0;
        while (iter.hasNext()) {
            ARCRecord record = (ARCRecord) iter.next();
            // NOTE(review): closing before reading the header presumably drains
            // the record so the digest/length are finalized — confirm against
            // ARCRecord semantics before relying on this ordering.
            record.close();
            ARCRecordMetaData h = (ARCRecordMetaData) record.getHeader();
            Header[] httpHeaders = record.getHttpHeaders();
            if (httpHeaders == null) {
                httpHeaders = new Header[0];
            }
            HeaderGroup hg = new HeaderGroup();
            hg.setHeaders(httpHeaders);
            StringBuilder builder = new StringBuilder();

            // SURT-form URI
            appendField(builder, SURT.fromURI(h.getUrl()));
            // record timestamp ('b')
            appendField(builder, h.getDate());
            // http header date
            appendTimeField(builder, hg.getFirstHeader("Date"));
            // response code ('s')
            appendField(builder, h.getStatusCode());
            // media type ('m')
            appendField(builder, h.getMimetype());
            // content checksum (like 'c', but here Base32 SHA1)
            appendField(builder, record.getDigestStr());
            // arc name ('g')
            appendField(builder, reader.getFileName());
            // compressed start offset ('V')
            appendField(builder, h.getOffset());

            // compressed end offset (?)
            //            appendField(builder,
            //                    reader.getInputStream() instanceof RepositionableStream
            //                    ? ((GzippedInputStream)reader.getInputStream()).vPosition()
            //                    : "-");
            // TODO; leave unavail for now
            appendField(builder, "-");

            // uncompressed (declared in ARC headerline) record length
            appendField(builder, h.getLength());
            // http header content-length
            appendField(builder, hg.getFirstHeader("Content-Length"));

            // http header mod-date
            appendTimeField(builder, hg.getFirstHeader("Last-Modified"));
            // http header expires
            appendTimeField(builder, hg.getFirstHeader("Expires"));

            // http header etag
            appendField(builder, hg.getFirstHeader("ETag"));
            // http header redirect ('Location' header?)
            appendField(builder, hg.getFirstHeader("Location"));
            // ip ('e')
            appendField(builder, h.getIp());
            // original URI
            appendField(builder, h.getUrl());
            // TODO MAYBE - a title from inside content? 

            writer.println(builder.toString());
            count++;
        }
        // Success: promote ".open" file to its final name.
        // NOTE(review): renameTo()'s boolean result is ignored; a failed rename
        // would leave the ".open" file behind silently.
        wcdxFile.renameTo(new File(wcdxPath));
    } catch (IOException e) {
        // soldier on: but leave '.open' wcdx file as indicator of error
        if (!wcdxFile.exists()) {
            try {
                wcdxFile.createNewFile();
            } catch (IOException e1) {
                // TODO Auto-generated catch block
                throw new RuntimeException(e1);
            }
        }
    } catch (RuntimeException e) {
        // soldier on: but leave '.open' wcdx file as indicator of error
        if (!wcdxFile.exists()) {
            try {
                wcdxFile.createNewFile();
            } catch (IOException e1) {
                // TODO Auto-generated catch block
                throw new RuntimeException(e1);
            }
        }
    } finally {
        if (writer != null) {
            writer.close();
        }
    }

    return new Object[] { wcdxPath, count };
}

From source file:com.jstar.eclipse.services.ConsoleService.java

/**
 * Returns a {@code PrintStream} that forwards everything written to it
 * into a freshly opened message stream of this console.
 */
public PrintStream getConsoleStream() {
    final MessageConsoleStream consoleStream = getConsole().newMessageStream();
    return new PrintStream(consoleStream);
}

From source file:gaffer.accumulo.utils.IngestUtils.java

/**
 * Get the existing splits from a table in Accumulo and write a splits file.
 * The number of splits is returned./* w ww. ja  v a2s  . com*/
 * 
 * @param conn  An existing connection to an Accumulo instance
 * @param table  The table name
 * @param fs  The FileSystem in which to create the splits file
 * @param splitsFile  A path for the splits file
 * @return The number of splits in the table
 * @throws TableNotFoundException
 * @throws IOException
 */
public static int createSplitsFile(Connector conn, String table, FileSystem fs, Path splitsFile)
        throws TableNotFoundException, IOException {
    // Get the splits from the table
    Collection<Text> splits = conn.tableOperations().getSplits(table);

    // Write the splits to file
    if (splits.isEmpty()) {
        return 0;
    }
    PrintStream out = new PrintStream(new BufferedOutputStream(fs.create(splitsFile, true)));
    for (Text split : splits) {
        out.println(new String(Base64.encodeBase64(split.getBytes())));
    }
    out.close();

    return splits.size();
}

From source file:at.ac.tuwien.big.moea.print.PopulationWriter.java

@Override
public String write(final Iterable<S> population) {
    // Render the population into an in-memory buffer and return it as text.
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        // BUG FIX: the no-charset PrintStream(OutputStream) constructor encodes
        // with the platform default charset, but the buffer was decoded below as
        // UTF-8 — any non-ASCII output could be corrupted. Encode explicitly in
        // UTF-8 so encoding and decoding agree.
        final PrintStream ps = new PrintStream(baos, false, "UTF-8");
        try {
            write(ps, population);
        } finally {
            // close() also flushes the PrintStream's buffer into baos
            ps.close();
        }
        return baos.toString("UTF-8");
    } catch (final UnsupportedEncodingException e) {
        // UTF-8 is mandated by the JVM spec, so this branch is effectively
        // unreachable; preserve the original fallback of returning the message.
        return e.getMessage();
    }
}

From source file:com.kixeye.chassis.bootstrap.configuration.file.PropertiesFileConfigurationWriter.java

@Override
public void write(Configuration configuration, Filter filter) {
    // BUG FIX: manage the PrintStream (not just the FileOutputStream) with
    // try-with-resources. Previously only fos was closed, so bytes still
    // sitting in the PrintStream's internal buffer were silently lost.
    try (FileOutputStream fos = new FileOutputStream(outputFile);
            PrintStream ps = new PrintStream(fos)) {
        dump(Configurations.asMap(configuration), filter, ps);
    } catch (Exception e) {
        // Pass the exception as the last argument so SLF4J logs the stack
        // trace; the cause was previously dropped entirely.
        LOGGER.error("Unable to write configs to file {}", outputFile, e);
    }
}

From source file:com.moscona.dataSpace.ExportHelper.java

/**
 * Exports a data frame to a CSV file, optionally preceded by metadata rows
 * (name, description, row ID, sort column, label/sorted column lists).
 *
 * Output column order: the row-ID column first (if any), then sorted
 * columns, then the remaining columns alphabetically, then label columns.
 *
 * @param df              the data frame to export
 * @param fileName        path of the CSV file to create
 * @param includeMetaData whether to prepend the metadata header rows
 * @throws FileNotFoundException if the output file cannot be created
 * @throws DataSpaceException    propagated from data frame access
 */
public void csvExport(DataFrame df, String fileName, boolean includeMetaData)
        throws FileNotFoundException, DataSpaceException {
    // FIXME exports sorted and label columns twice - once populated and once not - the populated ones are the wrong ones
    PrintStream out = new PrintStream(new File(fileName));
    try {
        // Classify columns: label columns and sorted columns are handled
        // specially when ordering the output (a column may appear in both).
        ArrayList<String> labels = new ArrayList<String>();
        ArrayList<String> sorted = new ArrayList<String>();

        for (String col : df.getColumnNames()) {
            if (df.isLabel(col)) {
                labels.add(col);
            }
            if (df.get(col).isSorted()) {
                sorted.add(col);
            }
        }

        // Optional metadata header: one "key,value" row per attribute,
        // then the label/sorted column lists, then a blank separator row.
        if (includeMetaData) {
            csvOut(out, "name", df.getName());
            csvOut(out, "description", df.getDescription());
            csvOut(out, "row ID", df.getRowId());
            csvOut(out, "sort column", df.getSortColumn());

            Collections.sort(labels);
            Collections.sort(sorted);

            out.println(excelQuote("label columns") + "," + StringUtils.join(labels, ","));
            out.println(excelQuote("sorted columns") + "," + StringUtils.join(sorted, ","));
            out.println();
        }

        // Build the final column order; "remaining" tracks columns not yet
        // placed so nothing is emitted twice.
        ArrayList<String> columns = new ArrayList<String>();
        ArrayList<String> remaining = new ArrayList<String>(df.getColumnNames());
        if (df.getRowId() != null) {
            // make first column the row ID
            String rowId = df.getRowId();
            columns.add(rowId);
            remaining.remove(rowId);
        }
        // add all the sorted columns
        columns.addAll(sorted);
        remaining.removeAll(sorted);
        remaining.removeAll(labels); // those will come in last
        Collections.sort(remaining);
        columns.addAll(remaining);
        columns.addAll(labels);

        // Header row, then one CSV row per data-frame row in column order.
        out.println(StringUtils.join(columns, ","));
        IVectorIterator<Map<String, IScalar>> iterator = df.iterator();
        while (iterator.hasNext()) {
            Map<String, IScalar> row = iterator.next();
            ArrayList<String> values = new ArrayList<String>();
            for (String col : columns) {
                values.add(toCsvString(row.get(col)));
            }
            out.println(StringUtils.join(values, ","));
        }
    } finally {
        // Always release the stream, even if iteration throws.
        out.close();
    }
}

From source file:edu.stanford.muse.graph.directed.Digraph.java

/**
 * Redirects this graph's output to the given file, replacing any previous
 * target stream.
 *
 * @param file path of the file to write to
 * @throws FileNotFoundException if the file cannot be created or opened
 */
private void setOut(String file) throws FileNotFoundException {
    this.out = new PrintStream(file);
}

From source file:com.googlecode.android_scripting.jsonrpc.JsonRpcServerTest.java

/**
 * End-to-end handshake test: starts a local JSON-RPC server, sends an
 * _authenticate request over a socket, and asserts the response has no error.
 */
public void testValidHandshake() throws IOException, JSONException {
    JsonRpcServer server = new JsonRpcServer(null, "foo");
    InetSocketAddress address = server.startLocal(0);
    Socket client = new Socket();
    try {
        client.connect(address);
        // autoFlush=true: without it PrintStream buffers the request line and
        // readLine() below can block indefinitely waiting for a response that
        // the server never received.
        PrintStream out = new PrintStream(client.getOutputStream(), true);
        out.println(buildRequest(0, "_authenticate", Lists.newArrayList("foo")));
        BufferedReader in = new BufferedReader(new InputStreamReader(client.getInputStream()));
        JSONObject response = new JSONObject(in.readLine());
        Object error = response.get("error");
        assertEquals(JSONObject.NULL, error);
    } finally {
        // Release the socket and server even when an assertion or I/O fails,
        // so later tests are not blocked by a dangling listener.
        client.close();
        server.shutdown();
    }
}

From source file:com.adaptris.util.stream.Slf4jLoggingOutputStreamTest.java

@Test
public void testLogDebug() throws Exception {
    // Push one line through the SLF4J-backed stream at DEBUG level, then
    // flush and close to force it out of the PrintStream buffer.
    final PrintStream stream = new PrintStream(new Slf4jLoggingOutputStream(LogLevel.DEBUG));
    stream.println(TEXT);
    stream.flush();
    stream.close();
}

From source file:com.sangupta.jerry.print.ConsoleTableWriter.java

/**
 * Output the data of the table as JSON.
 *
 * @param table
 *            the {@link ConsoleTable} to output
 *
 * @param writer
 *            the {@link PrintWriter} to write to; note it is flushed and
 *            closed before this method returns
 */
public void writeJson(ConsoleTable table, PrintWriter writer) {
    OutputStream os = new WriterOutputStream(writer);
    PrintStream ps = new PrintStream(os);
    try {
        // Delegate to the PrintStream-based overload.
        writeJson(table, ps);
    } finally {
        // Closing the PrintStream flushes and closes the WriterOutputStream,
        // which in turn closes the caller-supplied writer.
        ps.close();
    }
}