Example usage for java.io FileOutputStream getChannel

Introduction

On this page you can find example usage of java.io.FileOutputStream.getChannel().

Prototype

public FileChannel getChannel() 

Documentation

Returns the unique java.nio.channels.FileChannel object associated with this file output stream.

Usage
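
Before turning to the real-world examples below, here is a minimal sketch of the most common pattern: obtain the FileChannel from a FileOutputStream and use transferFrom to copy a readable channel into the file. The URL and file name are placeholders, and try-with-resources closes the stream (and with it the channel) even if the copy fails.

import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URL;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;

public class GetChannelSketch {
    public static void main(String[] args) throws IOException {
        // Placeholder source and destination; substitute your own.
        URL source = new URL("https://example.com/data.bin");
        try (ReadableByteChannel in = Channels.newChannel(source.openStream());
                FileOutputStream out = new FileOutputStream("data.bin")) {
            // Copy everything the source provides, starting at file offset 0.
            out.getChannel().transferFrom(in, 0, Long.MAX_VALUE);
        }
    }
}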

From source file:opennlp.tools.doc_classifier.DocClassifierTrainingSetMultilingualExtender.java

private void copyURLToFile(URL url, File file) {
    // Open a channel over the URL's stream and copy its contents into the
    // file; try-with-resources closes both resources even if the copy fails.
    try (ReadableByteChannel rbc = Channels.newChannel(url.openStream());
            FileOutputStream fos = new FileOutputStream(file)) {
        fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
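
A caveat that applies to this and the other download examples: transferFrom is not guaranteed to move the requested number of bytes in a single call (for instance when the source channel is non-blocking, or has fewer bytes immediately available), so defensive code loops until the source is drained. A minimal sketch of that loop, assuming rbc and fos are open as above:

    long position = 0;
    long transferred;
    // transferFrom reports how many bytes actually moved; for a blocking
    // source a return of 0 means end-of-stream, so stop when nothing arrives.
    while ((transferred = fos.getChannel().transferFrom(rbc, position, 1 << 16)) > 0) {
        position += transferred;
    }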

From source file:com.metamx.druid.index.v1.IndexMerger.java

private static File makeIndexFiles(final List<IndexableAdapter> indexes, final File outDir,
        final ProgressIndicator progress, final List<String> mergedDimensions, final List<String> mergedMetrics,
        final Function<ArrayList<Iterable<Rowboat>>, Iterable<Rowboat>> rowMergerFn) throws IOException {
    Map<String, String> metricTypes = Maps.newTreeMap(Ordering.<String>natural().nullsFirst());
    for (IndexableAdapter adapter : indexes) {
        for (String metric : adapter.getAvailableMetrics()) {
            metricTypes.put(metric, adapter.getMetricType(metric));
        }
    }
    final Interval dataInterval;
    File v8OutDir = new File(outDir, "v8-tmp");
    v8OutDir.mkdirs();

    /*************  Main index.drd file **************/
    progress.progress();
    long startTime = System.currentTimeMillis();
    File indexFile = new File(v8OutDir, "index.drd");

    FileOutputStream fileOutputStream = null;
    FileChannel channel = null;
    try {
        fileOutputStream = new FileOutputStream(indexFile);
        channel = fileOutputStream.getChannel();
        channel.write(ByteBuffer.wrap(new byte[] { IndexIO.V8_VERSION }));

        GenericIndexed.fromIterable(mergedDimensions, GenericIndexed.stringStrategy).writeToChannel(channel);
        GenericIndexed.fromIterable(mergedMetrics, GenericIndexed.stringStrategy).writeToChannel(channel);

        DateTime minTime = new DateTime(Long.MAX_VALUE);
        DateTime maxTime = new DateTime(0l);

        for (IndexableAdapter index : indexes) {
            minTime = JodaUtils.minDateTime(minTime, index.getDataInterval().getStart());
            maxTime = JodaUtils.maxDateTime(maxTime, index.getDataInterval().getEnd());
        }

        dataInterval = new Interval(minTime, maxTime);
        serializerUtils.writeString(channel, String.format("%s/%s", minTime, maxTime));
    } finally {
        Closeables.closeQuietly(channel);
        channel = null;
        Closeables.closeQuietly(fileOutputStream);
        fileOutputStream = null;
    }
    IndexIO.checkFileSize(indexFile);
    log.info("outDir[%s] completed index.drd in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime);

    /************* Setup Dim Conversions **************/
    progress.progress();
    startTime = System.currentTimeMillis();

    IOPeon ioPeon = new TmpFileIOPeon();
    ArrayList<FileOutputSupplier> dimOuts = Lists.newArrayListWithCapacity(mergedDimensions.size());
    Map<String, Integer> dimensionCardinalities = Maps.newHashMap();
    ArrayList<Map<String, IntBuffer>> dimConversions = Lists.newArrayListWithCapacity(indexes.size());

    for (IndexableAdapter index : indexes) {
        dimConversions.add(Maps.<String, IntBuffer>newHashMap());
    }

    for (String dimension : mergedDimensions) {
        final GenericIndexedWriter<String> writer = new GenericIndexedWriter<String>(ioPeon, dimension,
                GenericIndexed.stringStrategy);
        writer.open();

        List<Indexed<String>> dimValueLookups = Lists.newArrayListWithCapacity(indexes.size());
        DimValueConverter[] converters = new DimValueConverter[indexes.size()];
        for (int i = 0; i < indexes.size(); i++) {
            Indexed<String> dimValues = indexes.get(i).getDimValueLookup(dimension);
            if (dimValues != null) {
                dimValueLookups.add(dimValues);
                converters[i] = new DimValueConverter(dimValues);
            }
        }

        Iterable<String> dimensionValues = CombiningIterable.createSplatted(
                Iterables.transform(dimValueLookups, new Function<Indexed<String>, Iterable<String>>() {
                    @Override
                    public Iterable<String> apply(@Nullable Indexed<String> indexed) {
                        return Iterables.transform(indexed, new Function<String, String>() {
                            @Override
                            public String apply(@Nullable String input) {
                                return (input == null) ? "" : input;
                            }
                        });
                    }
                }), Ordering.<String>natural().nullsFirst());

        int count = 0;
        for (String value : dimensionValues) {
            value = value == null ? "" : value;
            writer.write(value);

            for (int i = 0; i < indexes.size(); i++) {
                DimValueConverter converter = converters[i];
                if (converter != null) {
                    converter.convert(value, count);
                }
            }

            ++count;
        }
        dimensionCardinalities.put(dimension, count);

        FileOutputSupplier dimOut = new FileOutputSupplier(IndexIO.makeDimFile(v8OutDir, dimension), true);
        dimOuts.add(dimOut);

        writer.close();
        serializerUtils.writeString(dimOut, dimension);
        ByteStreams.copy(writer.combineStreams(), dimOut);
        for (int i = 0; i < indexes.size(); ++i) {
            DimValueConverter converter = converters[i];
            if (converter != null) {
                dimConversions.get(i).put(dimension, converters[i].getConversionBuffer());
            }
        }

        ioPeon.cleanup();
    }
    log.info("outDir[%s] completed dim conversions in %,d millis.", v8OutDir,
            System.currentTimeMillis() - startTime);

    /************* Walk through data sets and merge them *************/
    progress.progress();
    startTime = System.currentTimeMillis();

    ArrayList<Iterable<Rowboat>> boats = Lists.newArrayListWithCapacity(indexes.size());

    for (int i = 0; i < indexes.size(); ++i) {
        final IndexableAdapter adapter = indexes.get(i);

        final int[] dimLookup = new int[mergedDimensions.size()];
        int count = 0;
        for (String dim : adapter.getAvailableDimensions()) {
            dimLookup[count] = mergedDimensions.indexOf(dim.toLowerCase());
            count++;
        }

        final int[] metricLookup = new int[mergedMetrics.size()];
        count = 0;
        for (String metric : adapter.getAvailableMetrics()) {
            metricLookup[count] = mergedMetrics.indexOf(metric);
            count++;
        }

        boats.add(new MMappedIndexRowIterable(
                Iterables.transform(indexes.get(i).getRows(), new Function<Rowboat, Rowboat>() {
                    @Override
                    public Rowboat apply(@Nullable Rowboat input) {
                        int[][] newDims = new int[mergedDimensions.size()][];
                        int j = 0;
                        for (int[] dim : input.getDims()) {
                            newDims[dimLookup[j]] = dim;
                            j++;
                        }

                        Object[] newMetrics = new Object[mergedMetrics.size()];
                        j = 0;
                        for (Object met : input.getMetrics()) {
                            newMetrics[metricLookup[j]] = met;
                            j++;
                        }

                        return new Rowboat(input.getTimestamp(), newDims, newMetrics, input.getRowNum(),
                                input.getDescriptions());
                    }
                }), mergedDimensions, dimConversions.get(i), i));
    }

    Iterable<Rowboat> theRows = rowMergerFn.apply(boats);

    CompressedLongsSupplierSerializer timeWriter = CompressedLongsSupplierSerializer.create(ioPeon,
            "little_end_time", IndexIO.BYTE_ORDER);

    timeWriter.open();

    ArrayList<VSizeIndexedWriter> forwardDimWriters = Lists.newArrayListWithCapacity(mergedDimensions.size());
    for (String dimension : mergedDimensions) {
        VSizeIndexedWriter writer = new VSizeIndexedWriter(ioPeon, dimension,
                dimensionCardinalities.get(dimension));
        writer.open();
        forwardDimWriters.add(writer);
    }

    ArrayList<MetricColumnSerializer> metWriters = Lists.newArrayListWithCapacity(mergedMetrics.size());
    for (Map.Entry<String, String> entry : metricTypes.entrySet()) {
        String metric = entry.getKey();
        String typeName = entry.getValue();
        if ("float".equals(typeName)) {
            metWriters.add(new FloatMetricColumnSerializer(metric, v8OutDir, ioPeon));
        } else {
            ComplexMetricSerde serde = ComplexMetrics.getSerdeForType(typeName);

            if (serde == null) {
                throw new ISE("Unknown type[%s]", typeName);
            }

            metWriters.add(new ComplexMetricColumnSerializer(metric, v8OutDir, ioPeon, serde));
        }
    }
    for (MetricColumnSerializer metWriter : metWriters) {
        metWriter.open();
    }

    int rowCount = 0;
    long time = System.currentTimeMillis();
    List<IntBuffer> rowNumConversions = Lists.newArrayListWithCapacity(indexes.size());
    for (IndexableAdapter index : indexes) {
        int[] arr = new int[index.getNumRows()];
        Arrays.fill(arr, INVALID_ROW);
        rowNumConversions.add(IntBuffer.wrap(arr));
    }

    final Map<String, String> descriptions = Maps.newHashMap();
    for (Rowboat theRow : theRows) {
        progress.progress();
        timeWriter.add(theRow.getTimestamp());

        final Object[] metrics = theRow.getMetrics();
        for (int i = 0; i < metrics.length; ++i) {
            metWriters.get(i).serialize(metrics[i]);
        }

        int[][] dims = theRow.getDims();
        for (int i = 0; i < dims.length; ++i) {
            List<Integer> listToWrite = (i >= dims.length || dims[i] == null) ? null : Ints.asList(dims[i]);
            forwardDimWriters.get(i).write(listToWrite);
        }

        for (Map.Entry<Integer, TreeSet<Integer>> comprisedRow : theRow.getComprisedRows().entrySet()) {
            final IntBuffer conversionBuffer = rowNumConversions.get(comprisedRow.getKey());

            for (Integer rowNum : comprisedRow.getValue()) {
                while (conversionBuffer.position() < rowNum) {
                    conversionBuffer.put(INVALID_ROW);
                }
                conversionBuffer.put(rowCount);
            }
        }

        if ((++rowCount % 500000) == 0) {
            log.info("outDir[%s] walked 500,000/%,d rows in %,d millis.", v8OutDir, rowCount,
                    System.currentTimeMillis() - time);
            time = System.currentTimeMillis();
        }

        descriptions.putAll(theRow.getDescriptions());
    }

    for (IntBuffer rowNumConversion : rowNumConversions) {
        rowNumConversion.rewind();
    }

    final File timeFile = IndexIO.makeTimeFile(v8OutDir, IndexIO.BYTE_ORDER);
    timeFile.delete();
    OutputSupplier<FileOutputStream> out = Files.newOutputStreamSupplier(timeFile, true);
    timeWriter.closeAndConsolidate(out);
    IndexIO.checkFileSize(timeFile);

    for (int i = 0; i < mergedDimensions.size(); ++i) {
        forwardDimWriters.get(i).close();
        ByteStreams.copy(forwardDimWriters.get(i).combineStreams(), dimOuts.get(i));
    }

    for (MetricColumnSerializer metWriter : metWriters) {
        metWriter.close();
    }

    ioPeon.cleanup();
    log.info("outDir[%s] completed walk through of %,d rows in %,d millis.", v8OutDir, rowCount,
            System.currentTimeMillis() - startTime);

    /************ Create Inverted Indexes *************/
    startTime = System.currentTimeMillis();

    final File invertedFile = new File(v8OutDir, "inverted.drd");
    Files.touch(invertedFile);
    out = Files.newOutputStreamSupplier(invertedFile, true);

    final File geoFile = new File(v8OutDir, "spatial.drd");
    Files.touch(geoFile);
    OutputSupplier<FileOutputStream> spatialOut = Files.newOutputStreamSupplier(geoFile, true);

    for (int i = 0; i < mergedDimensions.size(); ++i) {
        long dimStartTime = System.currentTimeMillis();
        String dimension = mergedDimensions.get(i);

        File dimOutFile = dimOuts.get(i).getFile();
        final MappedByteBuffer dimValsMapped = Files.map(dimOutFile);

        if (!dimension.equals(serializerUtils.readString(dimValsMapped))) {
            throw new ISE("dimensions[%s] didn't equate!?  This is a major WTF moment.", dimension);
        }
        Indexed<String> dimVals = GenericIndexed.read(dimValsMapped, GenericIndexed.stringStrategy);
        log.info("Starting dimension[%s] with cardinality[%,d]", dimension, dimVals.size());

        GenericIndexedWriter<ImmutableConciseSet> writer = new GenericIndexedWriter<ImmutableConciseSet>(ioPeon,
                dimension, ConciseCompressedIndexedInts.objectStrategy);
        writer.open();

        boolean isSpatialDim = "spatial".equals(descriptions.get(dimension));
        ByteBufferWriter<ImmutableRTree> spatialWriter = null;
        RTree tree = null;
        IOPeon spatialIoPeon = new TmpFileIOPeon();
        if (isSpatialDim) {
            spatialWriter = new ByteBufferWriter<ImmutableRTree>(spatialIoPeon, dimension,
                    IndexedRTree.objectStrategy);
            spatialWriter.open();
            tree = new RTree(2, new LinearGutmanSplitStrategy(0, 50));
        }

        for (String dimVal : IndexedIterable.create(dimVals)) {
            progress.progress();
            List<Iterable<Integer>> convertedInverteds = Lists.newArrayListWithCapacity(indexes.size());
            for (int j = 0; j < indexes.size(); ++j) {
                convertedInverteds.add(new ConvertingIndexedInts(indexes.get(j).getInverteds(dimension, dimVal),
                        rowNumConversions.get(j)));
            }

            ConciseSet bitset = new ConciseSet();
            for (Integer row : CombiningIterable.createSplatted(convertedInverteds,
                    Ordering.<Integer>natural().nullsFirst())) {
                if (row != INVALID_ROW) {
                    bitset.add(row);
                }
            }

            writer.write(ImmutableConciseSet.newImmutableFromMutable(bitset));

            if (isSpatialDim && dimVal != null) {
                List<String> stringCoords = Lists.newArrayList(SPLITTER.split(dimVal));
                float[] coords = new float[stringCoords.size()];
                for (int j = 0; j < coords.length; j++) {
                    coords[j] = Float.valueOf(stringCoords.get(j));
                }
                tree.insert(coords, bitset);
            }
        }
        writer.close();

        serializerUtils.writeString(out, dimension);
        ByteStreams.copy(writer.combineStreams(), out);
        ioPeon.cleanup();

        log.info("Completed dimension[%s] in %,d millis.", dimension,
                System.currentTimeMillis() - dimStartTime);

        if (isSpatialDim) {
            spatialWriter.write(ImmutableRTree.newImmutableFromMutable(tree));
            spatialWriter.close();

            serializerUtils.writeString(spatialOut, dimension);
            ByteStreams.copy(spatialWriter.combineStreams(), spatialOut);
            spatialIoPeon.cleanup();
        }

    }

    log.info("outDir[%s] completed inverted.drd in %,d millis.", v8OutDir,
            System.currentTimeMillis() - startTime);

    final ArrayList<String> expectedFiles = Lists.newArrayList(Iterables.concat(
            Arrays.asList("index.drd", "inverted.drd", "spatial.drd",
                    String.format("time_%s.drd", IndexIO.BYTE_ORDER)),
            Iterables.transform(mergedDimensions, GuavaUtils.formatFunction("dim_%s.drd")),
            Iterables.transform(mergedMetrics,
                    GuavaUtils.formatFunction(String.format("met_%%s_%s.drd", IndexIO.BYTE_ORDER)))));

    Map<String, File> files = Maps.newLinkedHashMap();
    for (String fileName : expectedFiles) {
        files.put(fileName, new File(v8OutDir, fileName));
    }

    File smooshDir = new File(v8OutDir, "smoosher");
    smooshDir.mkdir();

    for (Map.Entry<String, File> entry : Smoosh.smoosh(v8OutDir, smooshDir, files).entrySet()) {
        entry.getValue().delete();
    }

    for (File file : smooshDir.listFiles()) {
        Files.move(file, new File(v8OutDir, file.getName()));
    }

    if (!smooshDir.delete()) {
        log.info("Unable to delete temporary dir[%s], contains[%s]", smooshDir,
                Arrays.asList(smooshDir.listFiles()));
        throw new IOException(String.format("Unable to delete temporary dir[%s]", smooshDir));
    }

    createIndexDrdFile(IndexIO.V8_VERSION, v8OutDir,
            GenericIndexed.fromIterable(mergedDimensions, GenericIndexed.stringStrategy),
            GenericIndexed.fromIterable(mergedMetrics, GenericIndexed.stringStrategy), dataInterval);

    IndexIO.DefaultIndexIOHandler.convertV8toV9(v8OutDir, outDir);
    FileUtils.deleteDirectory(v8OutDir);

    return outDir;
}
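
In this example getChannel serves as a general serialization sink rather than a transfer target: the version byte goes in through channel.write(ByteBuffer), and GenericIndexed and serializerUtils write directly to the channel. The finally block is the pre-Java-7 Guava idiom that try-with-resources later replaced; since closing the channel also closes the underlying stream, the second closeQuietly is a harmless safety net.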

From source file:com.rogue.logore.update.UpdateHandler.java

/**
 * Downloads the latest jarfile for the {@link Plugin}
 *
 * @since 1.0.0
 * @version 1.0.0
 *
 * @TODO Add zip file support
 * @return The download result
 */
public Result download() {
    Result back = Result.UPDATED;
    File updateFolder = this.plugin.getServer().getUpdateFolderFile();
    String url = (String) this.latest.get(this.DL_URL);
    ReadableByteChannel rbc = null;
    FileOutputStream fos = null;
    try {
        URL call = new URL(url);
        rbc = Channels.newChannel(call.openStream());
        fos = new FileOutputStream(this.file);
        fos.getChannel().transferFrom(rbc, 0, 1 << 24);
    } catch (MalformedURLException ex) {
        this.plugin.getLogger().log(Level.SEVERE, "Error finding plugin update to download!", ex);
        back = Result.ERROR_FILENOTFOUND;
    } catch (IOException ex) {
        this.plugin.getLogger().log(Level.SEVERE, "Error transferring plugin data!", ex);
        back = Result.ERROR_DOWNLOAD_FAILED;
    } finally {
        try {
            if (fos != null) {
                fos.close();
            }
            if (rbc != null) {
                rbc.close();
            }
        } catch (IOException ex) {
            this.plugin.getLogger().log(Level.SEVERE, "Error closing streams/channels for download!", ex);
        }
    }
    return back;
}
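
Note the count argument: 1 << 24 caps the transfer at 16 MiB, so a jar larger than that would be silently truncated. The Long.MAX_VALUE idiom used in the other examples avoids any such cap.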

From source file:org.biopax.psidev.ontology_manager.impl.OboLoader.java

/**
 * Load an OBO file from a URL.
 *
 * @param url the URL to load (must not be null)
 * @return an ontology
 * @see #parseOboFile(File, String)
 */
public OntologyAccess parseOboFile(URL url, String ontologyID) throws OntologyLoaderException {
    // load config file (i.e. a map)
    // check if that URL has already been loaded
    // if so, get the associated temp file and check if available
    // if available, then load it and skip URL load
    // if any of the above failed, load it from the network.

    if (url == null) {
        throw new IllegalArgumentException("Please give a non null URL.");
    }

    File ontologyFile = null;

    try {
        if (ontologyFile == null || !ontologyFile.exists() || !ontologyFile.canRead()) {
            // if it is not defined, not there or not readable...
            // Read URL content
            log.info("Loading URL: " + url);
            URLConnection con = url.openConnection();
            long size = con.getContentLength(); // -1 if not stat available
            log.info("size = " + size);

            InputStream is = url.openStream();

            // make the temporary file name specific to the URL
            String name = null;
            String filename = url.getFile();
            int idx = filename.lastIndexOf('/');
            if (idx != -1) {
                name = filename.substring(idx + 1, filename.length());
                name = name.replaceAll("[.,;:&^%$@*?=]", "_");
            } else {
                name = "unknown";
            }

            ontologyFile = Files.createTempFile(name + "_", ".obo").toFile();
            ontologyFile.deleteOnExit();
            log.debug("The OBO file will be temporary stored as: " + ontologyFile.getAbsolutePath());

            FileOutputStream out = new FileOutputStream(ontologyFile);
            if (size == -1)
                size = 1024 * 1024 * 1024; //Integer.MAX_VALUE;
            ReadableByteChannel source = Channels.newChannel(is);
            size = out.getChannel().transferFrom(source, 0, size);
            log.info(size + " bytes downloaded");

            is.close();
            out.flush();
            out.close();
        }

        if (ontologyFile == null) {
            log.error("The ontology file is still null...");
        }

        // Parse file
        return parseOboFile(ontologyFile, ontologyID);

    } catch (IOException e) {
        throw new OntologyLoaderException("Error while loading URL (" + url + ")", e);
    }
}
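
When the server does not report a content length (size == -1), this example substitutes roughly 1 GiB as the upper bound for transferFrom, so a larger download would be cut short. Logging the value returned by transferFrom, as done here, is a useful sanity check on how many bytes actually arrived.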

From source file:net.flamefeed.ftb.modpackupdater.FileOperator.java

/**
 * Download a remote file and save it to the file system. Called from the
 * constructor, so final to prevent being overridden.
 *
 * @param path
 * A relative path to the file which will be downloaded. This determines both
 * the target URL and the local destination path. Example:
 * "mods/gregtechmod.zip"
 * 
 * @throws java.io.IOException
 */

public void downloadFile(String path) throws IOException {
    URL urlRemoteTarget = new URL(REMOTE_FILES_LOCATION + "/" + path);
    ReadableByteChannel in = Channels.newChannel(urlRemoteTarget.openStream());
    FileOutputStream out = new FileOutputStream(pathMinecraft + "/" + path);
    out.getChannel().transferFrom(in, 0, Long.MAX_VALUE);
}
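
Unlike the other examples, this method never closes the channel or the stream, so the file descriptor is held until the objects are garbage-collected. Wrapping in and out in a try-with-resources block, as in the sketch at the top of this page, avoids the leak.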

From source file:com.radiohitwave.ftpsync.API.java

public String DownloadApplicationFile() throws IOException {
    try {
        URL website = new URL(this.apiDomain + this.updateRemoteFile + "?" + System.nanoTime());
        ReadableByteChannel rbc = Channels.newChannel(website.openStream());
        File downloadFolder = new File(this.tempFilePath);
        downloadFolder.mkdirs();
        FileOutputStream fos = new FileOutputStream(this.tempFilePath + "/" + this.updateLocalFile);
        fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
        return this.tempFilePath;
    } catch (FileNotFoundException ex) {
        Log.error(ex.toString());
        Log.remoteError(ex);
    }
    return null;
}

From source file:io.crate.frameworks.mesos.CrateExecutor.java

private boolean fetchAndExtractUri(URI uri) {
    boolean success;
    try {
        URL download = uri.toURL();
        String fn = new File(download.getFile()).getName();
        File tmpFile = new File(fn);
        if (!tmpFile.exists()) {
            if (tmpFile.createNewFile()) {
                LOGGER.debug("Fetch: {} -> {}", download, tmpFile);
                ReadableByteChannel rbc = Channels.newChannel(download.openStream());
                FileOutputStream stream = new FileOutputStream(tmpFile);
                stream.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
            }
        } else {
            LOGGER.debug("tarball already downloaded");
        }
        success = extractFile(tmpFile);
    } catch (IOException e) {
        e.printStackTrace();
        success = false;
    }
    return success;
}

From source file:org.apache.pig.data.SchemaTupleBackend.java

private void copyAllFromDistributedCache() throws IOException {
    String toDeserialize = jConf.get(PigConstants.GENERATED_CLASSES_KEY);
    if (toDeserialize == null) {
        LOG.info("No classes in in key [" + PigConstants.GENERATED_CLASSES_KEY
                + "] to copy from distributed cache.");
        return;
    }
    LOG.info("Copying files in key [" + PigConstants.GENERATED_CLASSES_KEY + "] from distributed cache: "
            + toDeserialize);
    for (String s : toDeserialize.split(",")) {
        LOG.info("Attempting to read file: " + s);
        // The string is the symlink into the distributed cache
        File src = new File(s);
        FileInputStream fin = null;
        FileOutputStream fos = null;
        try {
            fin = new FileInputStream(src);
            fos = new FileOutputStream(new File(codeDir, s));

            fin.getChannel().transferTo(0, src.length(), fos.getChannel());
            LOG.info("Successfully copied file to local directory.");
        } finally {
            if (fin != null) {
                fin.close();
            }
            if (fos != null) {
                fos.close();
            }
        }
    }
}
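
This example approaches the copy from the other direction: it calls transferTo on the input file's channel and passes fos.getChannel() as the destination. For file-to-file copies the two methods are interchangeable, and either can defer to an OS-level fast path where the platform supports one.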

From source file:edu.harvard.iq.dvn.core.analysis.NetworkDataServiceBean.java

private void copyFile(StudyFileEditBean editBean) throws IOException {
    File tempFile = new File(editBean.getTempSystemFileLocation());
    dbgLog.fine("begin copyFile()");
    // create a sub-directory "ingested"
    File newDir = new File(tempFile.getParentFile(), "ingested");

    if (!newDir.exists()) {
        newDir.mkdirs();
    }
    dbgLog.fine("newDir: abs path:\n" + newDir.getAbsolutePath());

    File newFile = new File(newDir, tempFile.getName());

    FileInputStream fis = new FileInputStream(tempFile);
    FileOutputStream fos = new FileOutputStream(newFile);
    FileChannel fcin = fis.getChannel();
    FileChannel fcout = fos.getChannel();
    fcin.transferTo(0, fcin.size(), fcout);
    fcin.close();
    fcout.close();
    fis.close();
    fos.close();

    dbgLog.fine("newFile: abs path:\n" + newFile.getAbsolutePath());

    // store the tab-file location
    editBean.setIngestedSystemFileLocation(newFile.getAbsolutePath());

}
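
One caution here: transferTo, like transferFrom, may move fewer bytes than requested, and this example ignores the return value. For very large files it is safer to loop, advancing the position by the count each call returns.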

From source file:com.codelanx.playtime.update.UpdateHandler.java

/**
 * Downloads the latest jarfile for the {@link Plugin}
 *
 * @since 1.4.5
 * @version 1.4.5
 *
 * @TODO Add zip file support
 * @return The download result
 */
public Result download() {
    Result back = Result.UPDATED;
    File updateLoc = this.plugin.getServer().getUpdateFolderFile();
    updateLoc.mkdirs();
    String url = (String) this.latest.get(this.DL_URL);
    File location = new File(updateLoc, this.file);
    ReadableByteChannel rbc = null;
    FileOutputStream fos = null;
    try {
        URL call = new URL(url);
        rbc = Channels.newChannel(call.openStream());
        fos = new FileOutputStream(location);
        fos.getChannel().transferFrom(rbc, 0, 1 << 24);
    } catch (MalformedURLException ex) {
        this.plugin.getLogger().log(Level.SEVERE, "Error finding plugin update to download!", ex);
        back = Result.ERROR_FILENOTFOUND;
    } catch (IOException ex) {
        this.plugin.getLogger().log(Level.SEVERE, "Error transferring plugin data!", ex);
        back = Result.ERROR_DOWNLOAD_FAILED;
    } finally {
        try {
            if (fos != null) {
                fos.close();
            }
            if (rbc != null) {
                rbc.close();
            }
        } catch (IOException ex) {
            this.plugin.getLogger().log(Level.SEVERE, "Error closing streams/channels for download!", ex);
        }
    }
    return back;
}
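
The same 16 MiB cap noted for the earlier UpdateHandler example applies here as well; apart from where the destination file lives, the two download routines are identical.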