Example usage for java.io PrintStream close

Introduction

This page lists examples of how java.io.PrintStream.close() is used in real-world code.

Prototype

public void close() 

Document

Closes the stream.
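
PrintStream implements Closeable: close() flushes any buffered output and closes the underlying stream, and closing an already-closed stream has no effect. A minimal sketch, assuming Java 7+ so that try-with-resources calls close() automatically (the file name is illustrative):

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;

public class PrintStreamCloseExample {
    public static void main(String[] args) throws IOException {
        // close() runs automatically when the try block exits, flushing
        // buffered output and closing the wrapped FileOutputStream.
        try (PrintStream ps = new PrintStream(new FileOutputStream("example.txt"))) {
            ps.println("hello");
        }
    }
}

Note that PrintStream never throws IOException from its print methods; write errors only set an internal flag queried via checkError(), which is one reason an explicit (or automatic) close() matters.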

Usage

From source file:com.medlog.webservice.lifecycle.Security.java

private void sendProcessingError(Throwable t, ServletResponse response) {
    String stackTrace = getStackTrace(t);

    if (stackTrace != null && !stackTrace.equals("")) {
        try {
            response.setContentType("text/html");
            PrintStream ps = new PrintStream(response.getOutputStream());
            PrintWriter pw = new PrintWriter(ps);
            pw.print("<html>\n<head>\n<title>Error</title>\n</head>\n<body>\n"); //NOI18N

            // PENDING! Localize this for next official release
            pw.print("<h1>The resource did not process correctly</h1>\n<pre>\n");
            pw.print(stackTrace);
            pw.print("</pre></body>\n</html>"); //NOI18N
            pw.close();
            ps.close();
            response.getOutputStream().close();
        } catch (Exception ex) {
            // ignore: the response may already be committed
        }
    } else {
        try {
            PrintStream ps = new PrintStream(response.getOutputStream());
            t.printStackTrace(ps);
            ps.close();
            response.getOutputStream().close();
        } catch (Exception ex) {
            // ignore: nothing more can be done if writing the error fails
        }
    }
}

From source file:org.apache.pig.test.TestFilterOpString.java

@Test
public void testStringGte() throws Throwable {
    File tmpFile = File.createTempFile("test", "txt");
    PrintStream ps = new PrintStream(new FileOutputStream(tmpFile));
    int expectedCount = 0;
    for (int i = 0; i < LOOP_COUNT; i++) {
        if (i % 5 == 0) {
            ps.println("b:a");
            expectedCount++;
        } else if (i % 3 == 0) {
            ps.println("b:b");
            expectedCount++;
        } else {
            ps.println("a:b");
            // test with nulls
            ps.println("a:");
            ps.println(":b");
            ps.println(":");
        }
    }
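    // close() flushes the buffered lines so Pig reads the complete input file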
    ps.close();

    pig.registerQuery("A=load '" + Util.encodeEscape(Util.generateURI(tmpFile.toString(), pig.getPigContext()))
            + "' using " + PigStorage.class.getName() + "(':');");
    String query = "A = filter A by $0 gte $1;";

    log.info(query);
    pig.registerQuery(query);
    Iterator<Tuple> it = pig.openIterator("A");
    tmpFile.delete();
    int count = 0;
    while (it.hasNext()) {
        Tuple t = it.next();
        String first = t.get(0).toString();
        String second = t.get(1).toString();
        assertTrue(first.compareTo(second) >= 0);
        count++;
    }
    assertEquals(expectedCount, count);
}

From source file:org.apache.pig.test.TestFilterOpString.java

@Test
public void testStringLte() throws Throwable {
    File tmpFile = File.createTempFile("test", "txt");
    PrintStream ps = new PrintStream(new FileOutputStream(tmpFile));
    int expectedCount = 0;
    for (int i = 0; i < LOOP_COUNT; i++) {
        if (i % 5 == 0) {
            ps.println("b:a");
            // test with nulls
            ps.println("a:");
            ps.println(":b");
            ps.println(":");
        } else if (i % 3 == 0) {
            ps.println("b:b");
            expectedCount++;
        } else {
            ps.println("a:b");
            expectedCount++;
        }
    }
    ps.close();

    pig.registerQuery("A=load '" + Util.encodeEscape(Util.generateURI(tmpFile.toString(), pig.getPigContext()))
            + "' using " + PigStorage.class.getName() + "(':');");
    String query = "A = filter A by $0 lte $1;";

    log.info(query);
    pig.registerQuery(query);
    Iterator<Tuple> it = pig.openIterator("A");
    tmpFile.delete();
    int count = 0;
    while (it.hasNext()) {
        Tuple t = it.next();
        String first = t.get(0).toString();
        String second = t.get(1).toString();
        assertTrue(first.compareTo(second) <= 0);
        count++;
    }
    assertEquals(expectedCount, count);
}

From source file:gaffer.accumulo.splitpoints.EstimateSplitPointsDriver.java

@Override
public int run(String[] args) throws Exception {

    if (args.length < 5) {
        System.err.println("Usage: " + this.getClass().getName()
                + " <mapred_output_directory> <proportion_to_sample> <number_of_tablet_servers> <resulting_split_file> <input_path1>...");
        return 1;
    }

    // Parse arguments
    Path outputPath = new Path(args[0]);
    float proportionToSample = Float.parseFloat(args[1]);
    int numberTabletServers = Integer.parseInt(args[2]);
    Path resultingSplitsFile = new Path(args[3]);
    Path[] inputPaths = new Path[args.length - 4];
    for (int i = 0; i < inputPaths.length; i++) {
        inputPaths[i] = new Path(args[i + 4]);
    }

    // Conf and job
    Configuration conf = getConf();
    conf.setFloat("proportion_to_sample", proportionToSample);
    String jobName = "Estimate split points: input = ";
    for (int i = 0; i < inputPaths.length; i++) {
        jobName += inputPaths[i] + ", ";
    }
    jobName += "output = " + outputPath;
    Job job = Job.getInstance(conf, jobName);
    job.setJarByClass(getClass());

    // Input
    job.setInputFormatClass(SequenceFileInputFormat.class);
    for (int i = 0; i < inputPaths.length; i++) {
        SequenceFileInputFormat.addInputPath(job, inputPaths[i]);
    }

    // Mapper
    job.setMapperClass(EstimateSplitPointsMapper.class);
    job.setMapOutputKeyClass(Key.class);
    job.setMapOutputValueClass(Value.class);

    // Reducer
    job.setReducerClass(EstimateSplitPointsReducer.class);
    job.setOutputKeyClass(Key.class);
    job.setOutputValueClass(Value.class);
    job.setNumReduceTasks(1);

    // Output
    job.setOutputFormatClass(SequenceFileOutputFormat.class);
    SequenceFileOutputFormat.setOutputPath(job, outputPath);
    SequenceFileOutputFormat.setCompressOutput(job, true);
    SequenceFileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);
    SequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);

    // Run job
    job.waitForCompletion(true);

    // Successful?
    if (!job.isSuccessful()) {
        System.err.println("Error running job");
        return 1;
    }

    // Number of records output
    // NB In the following line use mapred.Task.Counter.REDUCE_OUTPUT_RECORDS rather than
    // mapreduce.TaskCounter.REDUCE_OUTPUT_RECORDS as this is more compatible with earlier
    // versions of Hadoop.
    @SuppressWarnings("deprecation")
    Counter counter = job.getCounters()
            .findCounter(org.apache.hadoop.mapred.Task.Counter.REDUCE_OUTPUT_RECORDS);
    long recordsOutput = counter.getValue();
    System.out.println("Number of records output = " + recordsOutput);

    // Work out when to output a split point. The number of split points
    // needed is the number of tablet servers minus 1 (because you don't
    // have to output the start of the first tablet or the end of the
    // last tablet).
    long outputEveryNthRecord = recordsOutput / (numberTabletServers - 1);
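    // Worked example: with 4 tablet servers and 1,200,000 output records, a
    // split point is taken every 400,000th record, yielding the 3 boundaries
    // between the 4 tablets. NB: this assumes recordsOutput >= numberTabletServers - 1;
    // otherwise outputEveryNthRecord is 0 and the modulo below throws ArithmeticException.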

    // Read through resulting file, pick out the split points and write to
    // file.
    FileSystem fs = FileSystem.get(conf);
    Path resultsFile = new Path(outputPath, "part-r-00000");
    @SuppressWarnings("deprecation")
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, resultsFile, conf);
    PrintStream splitsWriter = new PrintStream(new BufferedOutputStream(fs.create(resultingSplitsFile, true)));
    Key key = new Key();
    Value value = new Value();
    long count = 0;
    int numberSplitPointsOutput = 0;
    while (reader.next(key, value) && numberSplitPointsOutput < numberTabletServers - 1) {
        count++;
        if (count % outputEveryNthRecord == 0) {
            numberSplitPointsOutput++;
            // NB: Text.getBytes() returns the backing array, which may be longer
            // than getLength(); copyBytes() would be safer here.
            splitsWriter.println(new String(Base64.encodeBase64(key.getRow().getBytes())));
            System.out.println("Written split point: " + key.getRow());
        }
    }
    reader.close();
    splitsWriter.close();
    System.out.println("Number of split points output = " + numberSplitPointsOutput);
    return 0;
}

From source file:fr.cs.examples.bodies.Phasing.java

private void run(final File input)
        throws IOException, IllegalArgumentException, ParseException, OrekitException {

    // read input parameters
    KeyValueFileParser<ParameterKey> parser = new KeyValueFileParser<ParameterKey>(ParameterKey.class);
    parser.parseInput(new FileInputStream(input));
    TimeScale utc = TimeScalesFactory.getUTC();

    // simulation properties
    AbsoluteDate date = parser.getDate(ParameterKey.ORBIT_DATE, utc);
    int nbOrbits = parser.getInt(ParameterKey.PHASING_ORBITS_NUMBER);
    int nbDays = parser.getInt(ParameterKey.PHASING_DAYS_NUMBER);
    double latitude = parser.getAngle(ParameterKey.SUN_SYNCHRONOUS_REFERENCE_LATITUDE);
    boolean ascending = parser.getBoolean(ParameterKey.SUN_SYNCHRONOUS_REFERENCE_ASCENDING);
    double mst = parser.getTime(ParameterKey.SUN_SYNCHRONOUS_MEAN_SOLAR_TIME).getSecondsInDay() / 3600;
    int degree = parser.getInt(ParameterKey.GRAVITY_FIELD_DEGREE);
    int order = parser.getInt(ParameterKey.GRAVITY_FIELD_ORDER);
    String gridOutput = parser.getString(ParameterKey.GRID_OUTPUT);
    double[] gridLatitudes = new double[] { parser.getAngle(ParameterKey.GRID_LATITUDE_1),
            parser.getAngle(ParameterKey.GRID_LATITUDE_2), parser.getAngle(ParameterKey.GRID_LATITUDE_3),
            parser.getAngle(ParameterKey.GRID_LATITUDE_4), parser.getAngle(ParameterKey.GRID_LATITUDE_5) };
    boolean[] gridAscending = new boolean[] { parser.getBoolean(ParameterKey.GRID_ASCENDING_1),
            parser.getBoolean(ParameterKey.GRID_ASCENDING_2), parser.getBoolean(ParameterKey.GRID_ASCENDING_3),
            parser.getBoolean(ParameterKey.GRID_ASCENDING_4),
            parser.getBoolean(ParameterKey.GRID_ASCENDING_5) };

    gravityField = GravityFieldFactory.getNormalizedProvider(degree, order);

    // initial guess for orbit
    CircularOrbit orbit = guessOrbit(date, FramesFactory.getEME2000(), nbOrbits, nbDays, latitude, ascending,
            mst);
    System.out.println("initial orbit: " + orbit);
    System.out.println("please wait while orbit is adjusted...");
    System.out.println();

    // numerical model for improving orbit
    double[][] tolerances = NumericalPropagator.tolerances(0.1, orbit, OrbitType.CIRCULAR);
    DormandPrince853Integrator integrator = new DormandPrince853Integrator(1.0e-4 * orbit.getKeplerianPeriod(),
            1.0e-1 * orbit.getKeplerianPeriod(), tolerances[0], tolerances[1]);
    integrator.setInitialStepSize(1.0e-2 * orbit.getKeplerianPeriod());
    NumericalPropagator propagator = new NumericalPropagator(integrator);
    propagator.addForceModel(new HolmesFeatherstoneAttractionModel(
            FramesFactory.getGTOD(IERSConventions.IERS_2010, true), gravityField));
    propagator.addForceModel(new ThirdBodyAttraction(CelestialBodyFactory.getSun()));
    propagator.addForceModel(new ThirdBodyAttraction(CelestialBodyFactory.getMoon()));

    double deltaP = Double.POSITIVE_INFINITY;
    double deltaV = Double.POSITIVE_INFINITY;

    int counter = 0;
    DecimalFormat f = new DecimalFormat("0.000E00", new DecimalFormatSymbols(Locale.US));
    while (deltaP > 3.0e-1 || deltaV > 3.0e-4) {

        CircularOrbit previous = orbit;

        CircularOrbit tmp1 = improveEarthPhasing(previous, nbOrbits, nbDays, propagator);
        CircularOrbit tmp2 = improveSunSynchronization(tmp1, nbOrbits * tmp1.getKeplerianPeriod(), latitude,
                ascending, mst, propagator);
        orbit = improveFrozenEccentricity(tmp2, nbOrbits * tmp2.getKeplerianPeriod(), propagator);
        double da = orbit.getA() - previous.getA();
        double dex = orbit.getCircularEx() - previous.getCircularEx();
        double dey = orbit.getCircularEy() - previous.getCircularEy();
        double di = FastMath.toDegrees(orbit.getI() - previous.getI());
        double dr = FastMath.toDegrees(
                orbit.getRightAscensionOfAscendingNode() - previous.getRightAscensionOfAscendingNode());
        System.out.println(" iteration " + (++counter) + ": deltaA = " + f.format(da) + " m, deltaEx = "
                + f.format(dex) + ", deltaEy = " + f.format(dey) + ", deltaI = " + f.format(di)
                + " deg, deltaRAAN = " + f.format(dr) + " deg");

        PVCoordinates delta = new PVCoordinates(previous.getPVCoordinates(), orbit.getPVCoordinates());
        deltaP = delta.getPosition().getNorm();
        deltaV = delta.getVelocity().getNorm();

    }

    // final orbit
    System.out.println();
    System.out.println("final orbit (osculating): " + orbit);

    // generate the ground track grid file
    PrintStream output = new PrintStream(new File(input.getParent(), gridOutput));
    for (int i = 0; i < gridLatitudes.length; ++i) {
        printGridPoints(output, gridLatitudes[i], gridAscending[i], orbit, propagator, nbOrbits);
    }
    output.close();

}

From source file:com.google.cloud.dataflow.sdk.runners.worker.TextReaderTest.java

private File createFileWithCompressionType(String[] lines, String filename, CompressionType compressionType)
        throws IOException {
    File tmpFile = tmpFolder.newFile(filename);
    PrintStream writer = new PrintStream(
            getOutputStreamForCompressionType(new FileOutputStream(tmpFile), compressionType));
    for (String line : lines) {
        writer.println(line);
    }
    writer.close();
    return tmpFile;
}

From source file:de.ailis.wlandsuite.WebExtract.java

/**
 * Extracts the animations.
 *
 * @param sourceDirectory
 *            The input directory
 * @param targetDirectory
 *            The output directory
 * @throws IOException
 *             When file operation fails.
 */

private void extractAnimations(final File sourceDirectory, final File targetDirectory) throws IOException {
    // Extract tilesets
    final File animsDirectory = new File(new File(targetDirectory, "images"), "animations");
    animsDirectory.mkdirs();

    for (int gameId = 1; gameId <= 2; gameId++) {
        final String filename = "allpics" + gameId;

        log.info("Reading " + filename);
        final Pics pics;
        final InputStream stream = new FileInputStream(new File(sourceDirectory, filename));
        try {
            pics = Pics.read(stream);
        } finally {
            stream.close();
        }

        int i = 0;
        for (final PicsAnimation animation : pics.getAnimations()) {
            log.info("Writing pic " + i);
            final File animDirectory = new File(animsDirectory, String.format("%d%02d", gameId, i));
            animDirectory.mkdirs();

            final TransparentEgaImage baseFrame = new TransparentEgaImage(
                    this.scaleFilter.scale(animation.getBaseFrame()));

            int layerId = 1;
            for (final PicsAnimationFrameSet frameSet : animation.getFrameSets()) {
                final List<Pic> frames = frameSet.getFrames();
                final List<PicsAnimationInstruction> instructions = frameSet.getInstructions();
                final GifAnimWriter gif = new GifAnimWriter(new File(animDirectory, "layer" + layerId + ".gif"),
                        0);
                try {
                    gif.setTransparentIndex(0);
                    gif.setDelay(instructions.get(0).getDelay() * 50);
                    TransparentEgaImage current = baseFrame;
                    if (layerId == 1)
                        gif.addFrame(current);
                    else
                        gif.addFrame(new TransparentEgaImage(baseFrame.getWidth(), baseFrame.getHeight()));
                    for (int j = 0; j < instructions.size(); j++) {
                        final PicsAnimationInstruction instruction = instructions.get(j);
                        final int frameIndex = instruction.getFrame();
                        final int delay = instructions.get((j + 1) % instructions.size()).getDelay();
                        final TransparentEgaImage frame = frameIndex == 0 ? baseFrame
                                : new TransparentEgaImage(this.scaleFilter.scale(frames.get(frameIndex - 1)));
                        gif.setDelay(delay * 50);
                        gif.addFrame(current.getDiff(frame));
                        current = frame;
                    }
                } finally {
                    gif.close();
                }
                layerId++;
            }

            final File htmlFile = new File(animDirectory, "index.html");
            final PrintStream html = new PrintStream(htmlFile);
            html.println("<html>");
            html.println("<body>");
            html.println("<div style=\"position:relative\">");
            html.println("<img src=\"layer1.gif\" />");
            for (int j = 2; j < layerId; j++) {
                html.println("<img src=\"layer" + j + ".gif\" style=\"position:absolute;left:0;top:0\" />");
            }
            html.println("</div>");
            html.println("</body>");
            html.println("</html>");
            html.close();

            i++;
        }
    }
}

From source file:eu.scape_project.cdx_creator.CDXCreationTask.java

public void createIndex() {
    FileInputStream fileInputStream = null;
    ArchiveReader reader = null;
    FileOutputStream outputStream = null;
    try {
        fileInputStream = new FileInputStream(archiveFile);
        reader = ArchiveReaderFactory.getReader(fileInputStream, this.archiveFileName);
        reader.setComputePayloadDigest(config.isCreatePayloadDigest());
        List<CdxArchiveRecord> cdxArchRecords = new ArrayList<CdxArchiveRecord>();
        while (reader.hasNext()) {
            ArchiveRecord archRec = (ArchiveRecord) reader.next();
            CdxArchiveRecord cdxArchRec = CdxArchiveRecord.fromArchiveRecord(archRec);
            cdxArchRec.setContainerFileName(archiveFileName);
            cdxArchRec.setContainerLengthStr(Long.toString(archiveFile.length()));
            cdxArchRecords.add(cdxArchRec);
        }

        CsvMapper mapper = new CsvMapper();
        mapper.setDateFormat(GMTGTechDateFormat);

        String cdxfileCsColumns = config.getCdxfileCsColumns();
        List<String> cdxfileCsColumnsList = Arrays.asList(cdxfileCsColumns.split("\\s*,\\s*"));
        String[] cdxfileCsColumnsArray = cdxfileCsColumnsList.toArray(new String[cdxfileCsColumnsList.size()]);

        CsvSchema.Builder builder = CsvSchema.builder();
        for (String cdxField : cdxfileCsColumnsList) {
            builder.addColumn(cdxField);
        }
        builder.setColumnSeparator(' ');
        CsvSchema schema = builder.build();
        schema = schema.withoutQuoteChar();

        SimpleFilterProvider filterProvider = new SimpleFilterProvider().addFilter("cdxfields",
                FilterExceptFilter.filterOutAllExcept(cdxfileCsColumnsArray));

        ObjectWriter cdxArchRecordsWriter = mapper.writer(filterProvider).withSchema(schema);

        PrintStream pout = null;
        String outputPathStr = config.getOutputStr();
        if (outputPathStr != null) {
            try {
                // assign the outer variable so the finally block can close it
                outputStream = new FileOutputStream(outputPathStr, true);
                pout = new PrintStream(outputStream);
                System.setOut(pout);
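                // NB: System.out now points at the file and is not restored
                // afterwards; closing pout later also closes the redirected System.out.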
            } catch (FileNotFoundException ex) {
                LOG.error("File not found error", ex);
            }
        }
        System.out.println(" " + config.getCdxfileCsHeader());

        cdxArchRecordsWriter.writeValue(System.out, cdxArchRecords);

        if (pout != null) {
            pout.close();
        }

    } catch (FileNotFoundException ex) {
        LOG.error("File not found error", ex);
    } catch (IOException ex) {
        LOG.error("I/O Error", ex);
    } finally {
        try {
            if (fileInputStream != null) {
                fileInputStream.close();
            }

            if (outputStream != null) {
                outputStream.close();
            }

        } catch (IOException ex) {
            LOG.error("I/O Error", ex);
        }
    }
}

From source file:edu.cornell.med.icb.goby.modes.SplitFastaMode.java

/**
 * Splits a fasta / fastq file by (a) read length and (b) the maximum number of
 * entries per file. The names of the files that are written are printed to stdout.
 * @throws IOException error reading / writing files.
 */
@Override
public void execute() throws IOException {
    final FastXReader reader = new FastXReader(inputFile);
    final Int2ObjectMap<PrintStream> outputMap = new Int2ObjectOpenHashMap<PrintStream>();
    final Int2IntMap entriesPerReadLen = new Int2IntOpenHashMap();
    final Int2IntMap filesPerReadLen = new Int2IntOpenHashMap();
    final List<String> removeExt = Arrays.asList("gz", "fa", "mpfa", "fna", "fsa", "fas", "fasta", "fq", "mpfq",
            "fnq", "fsq", "fastq");
    String inputName = FilenameUtils.getName(inputFile);
    while (true) {
        // Remove the unwanted extensions from the file name
        final String ext = FilenameUtils.getExtension(inputName);
        if (!removeExt.contains(ext)) {
            break;
        }
        inputName = FilenameUtils.getBaseName(inputName);
    }
    final String outputFilenameTemplate = FilenameUtils.getFullPath(inputFile) + inputName
            + "._READLENGTH_._PART_." + reader.getFileType();
    final NumberFormat nf3 = NumberFormat.getInstance();
    nf3.setMinimumIntegerDigits(3);
    final NumberFormat nf2 = NumberFormat.getInstance();
    nf2.setMinimumIntegerDigits(2);
    for (final FastXEntry entry : reader) {
        final int readLen = Math.min(fastxSplitMaxLength, roundReadLen(entry.getReadLength(), splitReadsMod));
        PrintStream out = outputMap.get(readLen);
        if (out == null) {
            filesPerReadLen.put(readLen, 1);
            entriesPerReadLen.put(readLen, 0);
            String outputFilename = outputFilenameTemplate.replaceAll("_READLENGTH_", nf3.format(readLen));
            outputFilename = outputFilename.replaceAll("_PART_", nf2.format(1));
            System.out.println(outputFilename);
            out = new PrintStream(new BufferedOutputStream(new FileOutputStream(outputFilename)));
            outputMap.put(readLen, out);
        }
        int numEntries = entriesPerReadLen.get(readLen);
        if (numEntries == maxReadsPerFile) {
            out.close();
            numEntries = 0;
            int numFiles = filesPerReadLen.get(readLen);
            numFiles++;
            filesPerReadLen.put(readLen, numFiles);
            String outputFilename = outputFilenameTemplate.replaceAll("_READLENGTH_", nf3.format(readLen));
            outputFilename = outputFilename.replaceAll("_PART_", nf2.format(numFiles));
            System.out.println(outputFilename);
            out = new PrintStream(new BufferedOutputStream(new FileOutputStream(outputFilename)));
            outputMap.put(readLen, out);
        }
        out.println(entry.getEntry());
        entriesPerReadLen.put(readLen, numEntries + 1);
    }
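    // Close every per-read-length stream so buffered entries are flushed to disk.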
    for (final PrintStream out : outputMap.values()) {
        out.close();
    }
    outputMap.clear();
    reader.close();
}

From source file:jenkins.plugins.logstash.persistence.ElasticSearchDao.java

private String getErrorMessage(CloseableHttpResponse response) {
    ByteArrayOutputStream byteStream = null;
    PrintStream stream = null;
    try {
        byteStream = new ByteArrayOutputStream();
        stream = new PrintStream(byteStream);

        try {
            stream.print("HTTP error code: ");
            stream.println(response.getStatusLine().getStatusCode());
            stream.print("URI: ");
            stream.println(uri.toString());
            stream.println("RESPONSE: " + response.toString());
            response.getEntity().writeTo(stream);
        } catch (IOException e) {
            stream.println(ExceptionUtils.getStackTrace(e));
        }
        stream.flush();
        return byteStream.toString();
    } finally {
        if (stream != null) {
            stream.close();
        }
    }
}