Example usage for java.nio.channels FileChannel write

Introduction

On this page you can find example usage for java.nio.channels FileChannel write.

Prototype

public final long write(ByteBuffer[] srcs) throws IOException 

Document

Writes a sequence of bytes to this channel from the given buffers.
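
The prototype above is the gathering-write variant: a single call drains a sequence of buffers into the channel, in order, and returns the total number of bytes written. Below is a minimal, self-contained sketch; the file name example.txt and the buffer contents are made up for illustration:

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;

public class GatheringWriteExample {
    public static void main(String[] args) throws IOException {
        // Two buffers written to the file back-to-back by a single call
        ByteBuffer header = ByteBuffer.wrap("header\n".getBytes(StandardCharsets.UTF_8));
        ByteBuffer body = ByteBuffer.wrap("body\n".getBytes(StandardCharsets.UTF_8));
        try (FileChannel channel = new FileOutputStream("example.txt").getChannel()) {
            long written = channel.write(new ByteBuffer[] { header, body });
            System.out.println("wrote " + written + " bytes");
        }
    }
}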

Usage

From source file:com.metamx.druid.index.v1.IndexMerger.java

private static File makeIndexFiles(final List<IndexableAdapter> indexes, final File outDir,
        final ProgressIndicator progress, final List<String> mergedDimensions, final List<String> mergedMetrics,
        final Function<ArrayList<Iterable<Rowboat>>, Iterable<Rowboat>> rowMergerFn) throws IOException {
    Map<String, String> metricTypes = Maps.newTreeMap(Ordering.<String>natural().nullsFirst());
    for (IndexableAdapter adapter : indexes) {
        for (String metric : adapter.getAvailableMetrics()) {
            metricTypes.put(metric, adapter.getMetricType(metric));
        }
    }
    final Interval dataInterval;
    File v8OutDir = new File(outDir, "v8-tmp");
    v8OutDir.mkdirs();

    /*************  Main index.drd file **************/
    progress.progress();
    long startTime = System.currentTimeMillis();
    File indexFile = new File(v8OutDir, "index.drd");

    FileOutputStream fileOutputStream = null;
    FileChannel channel = null;
    try {
        fileOutputStream = new FileOutputStream(indexFile);
        channel = fileOutputStream.getChannel();
        channel.write(ByteBuffer.wrap(new byte[] { IndexIO.V8_VERSION }));

        GenericIndexed.fromIterable(mergedDimensions, GenericIndexed.stringStrategy).writeToChannel(channel);
        GenericIndexed.fromIterable(mergedMetrics, GenericIndexed.stringStrategy).writeToChannel(channel);

        DateTime minTime = new DateTime(Long.MAX_VALUE);
        DateTime maxTime = new DateTime(0L);

        for (IndexableAdapter index : indexes) {
            minTime = JodaUtils.minDateTime(minTime, index.getDataInterval().getStart());
            maxTime = JodaUtils.maxDateTime(maxTime, index.getDataInterval().getEnd());
        }

        dataInterval = new Interval(minTime, maxTime);
        serializerUtils.writeString(channel, String.format("%s/%s", minTime, maxTime));
    } finally {
        Closeables.closeQuietly(channel);
        channel = null;
        Closeables.closeQuietly(fileOutputStream);
        fileOutputStream = null;
    }
    IndexIO.checkFileSize(indexFile);
    log.info("outDir[%s] completed index.drd in %,d millis.", v8OutDir, System.currentTimeMillis() - startTime);

    /************* Setup Dim Conversions **************/
    progress.progress();
    startTime = System.currentTimeMillis();

    IOPeon ioPeon = new TmpFileIOPeon();
    ArrayList<FileOutputSupplier> dimOuts = Lists.newArrayListWithCapacity(mergedDimensions.size());
    Map<String, Integer> dimensionCardinalities = Maps.newHashMap();
    ArrayList<Map<String, IntBuffer>> dimConversions = Lists.newArrayListWithCapacity(indexes.size());

    for (IndexableAdapter index : indexes) {
        dimConversions.add(Maps.<String, IntBuffer>newHashMap());
    }

    for (String dimension : mergedDimensions) {
        final GenericIndexedWriter<String> writer = new GenericIndexedWriter<String>(ioPeon, dimension,
                GenericIndexed.stringStrategy);
        writer.open();

        List<Indexed<String>> dimValueLookups = Lists.newArrayListWithCapacity(indexes.size());
        DimValueConverter[] converters = new DimValueConverter[indexes.size()];
        for (int i = 0; i < indexes.size(); i++) {
            Indexed<String> dimValues = indexes.get(i).getDimValueLookup(dimension);
            if (dimValues != null) {
                dimValueLookups.add(dimValues);
                converters[i] = new DimValueConverter(dimValues);
            }
        }

        Iterable<String> dimensionValues = CombiningIterable.createSplatted(
                Iterables.transform(dimValueLookups, new Function<Indexed<String>, Iterable<String>>() {
                    @Override
                    public Iterable<String> apply(@Nullable Indexed<String> indexed) {
                        return Iterables.transform(indexed, new Function<String, String>() {
                            @Override
                            public String apply(@Nullable String input) {
                                return (input == null) ? "" : input;
                            }
                        });
                    }
                }), Ordering.<String>natural().nullsFirst());

        int count = 0;
        for (String value : dimensionValues) {
            value = value == null ? "" : value;
            writer.write(value);

            for (int i = 0; i < indexes.size(); i++) {
                DimValueConverter converter = converters[i];
                if (converter != null) {
                    converter.convert(value, count);
                }
            }

            ++count;
        }
        dimensionCardinalities.put(dimension, count);

        FileOutputSupplier dimOut = new FileOutputSupplier(IndexIO.makeDimFile(v8OutDir, dimension), true);
        dimOuts.add(dimOut);

        writer.close();
        serializerUtils.writeString(dimOut, dimension);
        ByteStreams.copy(writer.combineStreams(), dimOut);
        for (int i = 0; i < indexes.size(); ++i) {
            DimValueConverter converter = converters[i];
            if (converter != null) {
                dimConversions.get(i).put(dimension, converters[i].getConversionBuffer());
            }
        }

        ioPeon.cleanup();
    }
    log.info("outDir[%s] completed dim conversions in %,d millis.", v8OutDir,
            System.currentTimeMillis() - startTime);

    /************* Walk through data sets and merge them *************/
    progress.progress();
    startTime = System.currentTimeMillis();

    ArrayList<Iterable<Rowboat>> boats = Lists.newArrayListWithCapacity(indexes.size());

    for (int i = 0; i < indexes.size(); ++i) {
        final IndexableAdapter adapter = indexes.get(i);

        final int[] dimLookup = new int[mergedDimensions.size()];
        int count = 0;
        for (String dim : adapter.getAvailableDimensions()) {
            dimLookup[count] = mergedDimensions.indexOf(dim.toLowerCase());
            count++;
        }

        final int[] metricLookup = new int[mergedMetrics.size()];
        count = 0;
        for (String metric : adapter.getAvailableMetrics()) {
            metricLookup[count] = mergedMetrics.indexOf(metric);
            count++;
        }

        boats.add(new MMappedIndexRowIterable(
                Iterables.transform(indexes.get(i).getRows(), new Function<Rowboat, Rowboat>() {
                    @Override
                    public Rowboat apply(@Nullable Rowboat input) {
                        int[][] newDims = new int[mergedDimensions.size()][];
                        int j = 0;
                        for (int[] dim : input.getDims()) {
                            newDims[dimLookup[j]] = dim;
                            j++;
                        }

                        Object[] newMetrics = new Object[mergedMetrics.size()];
                        j = 0;
                        for (Object met : input.getMetrics()) {
                            newMetrics[metricLookup[j]] = met;
                            j++;
                        }

                        return new Rowboat(input.getTimestamp(), newDims, newMetrics, input.getRowNum(),
                                input.getDescriptions());
                    }
                }), mergedDimensions, dimConversions.get(i), i));
    }

    Iterable<Rowboat> theRows = rowMergerFn.apply(boats);

    CompressedLongsSupplierSerializer timeWriter = CompressedLongsSupplierSerializer.create(ioPeon,
            "little_end_time", IndexIO.BYTE_ORDER);

    timeWriter.open();

    ArrayList<VSizeIndexedWriter> forwardDimWriters = Lists.newArrayListWithCapacity(mergedDimensions.size());
    for (String dimension : mergedDimensions) {
        VSizeIndexedWriter writer = new VSizeIndexedWriter(ioPeon, dimension,
                dimensionCardinalities.get(dimension));
        writer.open();
        forwardDimWriters.add(writer);
    }

    ArrayList<MetricColumnSerializer> metWriters = Lists.newArrayListWithCapacity(mergedMetrics.size());
    for (Map.Entry<String, String> entry : metricTypes.entrySet()) {
        String metric = entry.getKey();
        String typeName = entry.getValue();
        if ("float".equals(typeName)) {
            metWriters.add(new FloatMetricColumnSerializer(metric, v8OutDir, ioPeon));
        } else {
            ComplexMetricSerde serde = ComplexMetrics.getSerdeForType(typeName);

            if (serde == null) {
                throw new ISE("Unknown type[%s]", typeName);
            }

            metWriters.add(new ComplexMetricColumnSerializer(metric, v8OutDir, ioPeon, serde));
        }
    }
    for (MetricColumnSerializer metWriter : metWriters) {
        metWriter.open();
    }

    int rowCount = 0;
    long time = System.currentTimeMillis();
    List<IntBuffer> rowNumConversions = Lists.newArrayListWithCapacity(indexes.size());
    for (IndexableAdapter index : indexes) {
        int[] arr = new int[index.getNumRows()];
        Arrays.fill(arr, INVALID_ROW);
        rowNumConversions.add(IntBuffer.wrap(arr));
    }

    final Map<String, String> descriptions = Maps.newHashMap();
    for (Rowboat theRow : theRows) {
        progress.progress();
        timeWriter.add(theRow.getTimestamp());

        final Object[] metrics = theRow.getMetrics();
        for (int i = 0; i < metrics.length; ++i) {
            metWriters.get(i).serialize(metrics[i]);
        }

        int[][] dims = theRow.getDims();
        for (int i = 0; i < dims.length; ++i) {
            List<Integer> listToWrite = (i >= dims.length || dims[i] == null) ? null : Ints.asList(dims[i]);
            forwardDimWriters.get(i).write(listToWrite);
        }

        for (Map.Entry<Integer, TreeSet<Integer>> comprisedRow : theRow.getComprisedRows().entrySet()) {
            final IntBuffer conversionBuffer = rowNumConversions.get(comprisedRow.getKey());

            for (Integer rowNum : comprisedRow.getValue()) {
                while (conversionBuffer.position() < rowNum) {
                    conversionBuffer.put(INVALID_ROW);
                }
                conversionBuffer.put(rowCount);
            }
        }

        if ((++rowCount % 500000) == 0) {
            log.info("outDir[%s] walked 500,000/%,d rows in %,d millis.", v8OutDir, rowCount,
                    System.currentTimeMillis() - time);
            time = System.currentTimeMillis();
        }

        descriptions.putAll(theRow.getDescriptions());
    }

    for (IntBuffer rowNumConversion : rowNumConversions) {
        rowNumConversion.rewind();
    }

    final File timeFile = IndexIO.makeTimeFile(v8OutDir, IndexIO.BYTE_ORDER);
    timeFile.delete();
    OutputSupplier<FileOutputStream> out = Files.newOutputStreamSupplier(timeFile, true);
    timeWriter.closeAndConsolidate(out);
    IndexIO.checkFileSize(timeFile);

    for (int i = 0; i < mergedDimensions.size(); ++i) {
        forwardDimWriters.get(i).close();
        ByteStreams.copy(forwardDimWriters.get(i).combineStreams(), dimOuts.get(i));
    }

    for (MetricColumnSerializer metWriter : metWriters) {
        metWriter.close();
    }

    ioPeon.cleanup();
    log.info("outDir[%s] completed walk through of %,d rows in %,d millis.", v8OutDir, rowCount,
            System.currentTimeMillis() - startTime);

    /************ Create Inverted Indexes *************/
    startTime = System.currentTimeMillis();

    final File invertedFile = new File(v8OutDir, "inverted.drd");
    Files.touch(invertedFile);
    out = Files.newOutputStreamSupplier(invertedFile, true);

    final File geoFile = new File(v8OutDir, "spatial.drd");
    Files.touch(geoFile);
    OutputSupplier<FileOutputStream> spatialOut = Files.newOutputStreamSupplier(geoFile, true);

    for (int i = 0; i < mergedDimensions.size(); ++i) {
        long dimStartTime = System.currentTimeMillis();
        String dimension = mergedDimensions.get(i);

        File dimOutFile = dimOuts.get(i).getFile();
        final MappedByteBuffer dimValsMapped = Files.map(dimOutFile);

        if (!dimension.equals(serializerUtils.readString(dimValsMapped))) {
            throw new ISE("dimensions[%s] didn't equate!?  This is a major WTF moment.", dimension);
        }
        Indexed<String> dimVals = GenericIndexed.read(dimValsMapped, GenericIndexed.stringStrategy);
        log.info("Starting dimension[%s] with cardinality[%,d]", dimension, dimVals.size());

        GenericIndexedWriter<ImmutableConciseSet> writer = new GenericIndexedWriter<ImmutableConciseSet>(ioPeon,
                dimension, ConciseCompressedIndexedInts.objectStrategy);
        writer.open();

        boolean isSpatialDim = "spatial".equals(descriptions.get(dimension));
        ByteBufferWriter<ImmutableRTree> spatialWriter = null;
        RTree tree = null;
        IOPeon spatialIoPeon = new TmpFileIOPeon();
        if (isSpatialDim) {
            spatialWriter = new ByteBufferWriter<ImmutableRTree>(spatialIoPeon, dimension,
                    IndexedRTree.objectStrategy);
            spatialWriter.open();
            tree = new RTree(2, new LinearGutmanSplitStrategy(0, 50));
        }

        for (String dimVal : IndexedIterable.create(dimVals)) {
            progress.progress();
            List<Iterable<Integer>> convertedInverteds = Lists.newArrayListWithCapacity(indexes.size());
            for (int j = 0; j < indexes.size(); ++j) {
                convertedInverteds.add(new ConvertingIndexedInts(indexes.get(j).getInverteds(dimension, dimVal),
                        rowNumConversions.get(j)));
            }

            ConciseSet bitset = new ConciseSet();
            for (Integer row : CombiningIterable.createSplatted(convertedInverteds,
                    Ordering.<Integer>natural().nullsFirst())) {
                if (row != INVALID_ROW) {
                    bitset.add(row);
                }
            }

            writer.write(ImmutableConciseSet.newImmutableFromMutable(bitset));

            if (isSpatialDim && dimVal != null) {
                List<String> stringCoords = Lists.newArrayList(SPLITTER.split(dimVal));
                float[] coords = new float[stringCoords.size()];
                for (int j = 0; j < coords.length; j++) {
                    coords[j] = Float.valueOf(stringCoords.get(j));
                }
                tree.insert(coords, bitset);
            }
        }
        writer.close();

        serializerUtils.writeString(out, dimension);
        ByteStreams.copy(writer.combineStreams(), out);
        ioPeon.cleanup();

        log.info("Completed dimension[%s] in %,d millis.", dimension,
                System.currentTimeMillis() - dimStartTime);

        if (isSpatialDim) {
            spatialWriter.write(ImmutableRTree.newImmutableFromMutable(tree));
            spatialWriter.close();

            serializerUtils.writeString(spatialOut, dimension);
            ByteStreams.copy(spatialWriter.combineStreams(), spatialOut);
            spatialIoPeon.cleanup();
        }

    }

    log.info("outDir[%s] completed inverted.drd in %,d millis.", v8OutDir,
            System.currentTimeMillis() - startTime);

    final ArrayList<String> expectedFiles = Lists.newArrayList(Iterables.concat(
            Arrays.asList("index.drd", "inverted.drd", "spatial.drd",
                    String.format("time_%s.drd", IndexIO.BYTE_ORDER)),
            Iterables.transform(mergedDimensions, GuavaUtils.formatFunction("dim_%s.drd")),
            Iterables.transform(mergedMetrics,
                    GuavaUtils.formatFunction(String.format("met_%%s_%s.drd", IndexIO.BYTE_ORDER)))));

    Map<String, File> files = Maps.newLinkedHashMap();
    for (String fileName : expectedFiles) {
        files.put(fileName, new File(v8OutDir, fileName));
    }

    File smooshDir = new File(v8OutDir, "smoosher");
    smooshDir.mkdir();

    for (Map.Entry<String, File> entry : Smoosh.smoosh(v8OutDir, smooshDir, files).entrySet()) {
        entry.getValue().delete();
    }

    for (File file : smooshDir.listFiles()) {
        Files.move(file, new File(v8OutDir, file.getName()));
    }

    if (!smooshDir.delete()) {
        log.info("Unable to delete temporary dir[%s], contains[%s]", smooshDir,
                Arrays.asList(smooshDir.listFiles()));
        throw new IOException(String.format("Unable to delete temporary dir[%s]", smooshDir));
    }

    createIndexDrdFile(IndexIO.V8_VERSION, v8OutDir,
            GenericIndexed.fromIterable(mergedDimensions, GenericIndexed.stringStrategy),
            GenericIndexed.fromIterable(mergedMetrics, GenericIndexed.stringStrategy), dataInterval);

    IndexIO.DefaultIndexIOHandler.convertV8toV9(v8OutDir, outDir);
    FileUtils.deleteDirectory(v8OutDir);

    return outDir;
}

From source file:Util.java

public void write(File dest, InputStream in) {
    FileChannel channel = null;
    try {
        channel = new FileOutputStream(dest).getChannel();
    } catch (FileNotFoundException e) {
        System.out.println(e);
        return; // no channel was opened, so there is nothing to write to
    }
    try {
        int byteCount = 0;
        byte[] buffer = new byte[BUFFER_SIZE];
        int bytesRead = -1;
        while ((bytesRead = in.read(buffer)) != -1) {
            ByteBuffer byteBuffer = ByteBuffer.wrap(buffer, 0, bytesRead);
            channel.write(byteBuffer);
            byteCount += bytesRead;
        }

    } catch (IOException e) {
        System.out.println(e);
    } finally {

        try {
            if (channel != null) {
                channel.close();
            }
            if (in != null) {
                in.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
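
One caveat about the loop above: a single FileChannel.write(ByteBuffer) call is not guaranteed to consume the whole buffer; it returns the number of bytes actually written. A defensive variant of the loop body (a sketch, not part of the original Util.java) drains each buffer completely:

        ByteBuffer byteBuffer = ByteBuffer.wrap(buffer, 0, bytesRead);
        while (byteBuffer.hasRemaining()) {
            // write() may perform a partial write, so loop until the buffer is drained
            channel.write(byteBuffer);
        }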

From source file:org.zuinnote.hadoop.office.format.common.parser.msexcel.internal.EncryptedCachedDiskStringsTable.java

/**
 * Adds a string to the table on disk.
 * 
 * @param str string to store
 * @param os  OutputStream to use to write it
 * @throws IOException
 */
private void addString(String str, OutputStream os) throws IOException {
    if (this.cacheSize >= 0) { // add to disk
        byte[] strbytes = str.getBytes(EncryptedCachedDiskStringsTable.encoding);
        byte[] sizeOfStr = ByteBuffer.allocate(4).putInt(strbytes.length).array();
        this.stringPositionInFileList.add(this.tempFileSize);
        if (os != null) {
            os.write(sizeOfStr);
            os.write(strbytes);
        } else { // we can write to the random access file
            FileChannel fc = this.tempRAF.getChannel().position(this.tempFileSize);
            fc.write(ByteBuffer.wrap(sizeOfStr));
            fc.write(ByteBuffer.wrap(strbytes));
        }
        this.tempFileSize += sizeOfStr.length + strbytes.length;
    }
    if (this.cacheSize < 0) { // put it into cache
        this.cache.put(this.currentItem, str);
        this.currentItem++;
    } else if ((this.cacheSize > 0) && (this.currentItem < this.cacheSize)) { // put the first items already into
        // cache
        this.cache.put(this.currentItem, str);
        this.currentItem++;
    }
}

From source file:com.doplgangr.secrecy.FileSystem.File.java

public java.io.File readFile(CryptStateListener listener) {
    decrypting = true;
    InputStream is = null;
    OutputStream out = null;
    java.io.File outputFile = null;
    try {
        outputFile = java.io.File.createTempFile("tmp" + name, "." + FileType, storage.getTempFolder());
        outputFile.mkdirs();
        outputFile.createNewFile();
        AES_Encryptor enc = new AES_Encryptor(key);
        is = new CipherInputStream(new FileInputStream(file), enc.decryptstream());
        listener.setMax((int) file.length());
        ReadableByteChannel inChannel = Channels.newChannel(is);
        FileChannel outChannel = new FileOutputStream(outputFile).getChannel();
        ByteBuffer byteBuffer = ByteBuffer.allocate(Config.bufferSize);
        while (inChannel.read(byteBuffer) >= 0 || byteBuffer.position() > 0) {
            byteBuffer.flip();
            outChannel.write(byteBuffer);
            byteBuffer.compact();
            listener.updateProgress((int) outChannel.size());
        }
        inChannel.close();
        outChannel.close();
        Util.log(outputFile.getName(), outputFile.length());
        return outputFile;
    } catch (FileNotFoundException e) {
        listener.onFailed(2);
        Util.log("Encrypted File is missing", e.getMessage());
    } catch (IOException e) {
        Util.log("IO Exception while decrypting", e.getMessage());
        if (e.getMessage().contains("pad block corrupted"))
            listener.onFailed(1);
        else
            e.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        listener.Finished();
        decrypting = false;
        try {
            if (is != null) {
                is.close();
            }
            if (out != null) {
                out.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    // An error occurred. Too bad.
    if (outputFile != null)
        storage.purgeFile(outputFile);
    return null;
}

From source file:org.apache.axiom.attachments.impl.BufferUtils.java

/**
 * Optimized writing to a FileOutputStream using a channel.
 * @param is the InputStream to read from
 * @param fos the FileOutputStream to write to
 * @return false if the lock was not acquired
 * @throws IOException
 */
public static boolean inputStream2FileOutputStream(InputStream is, FileOutputStream fos) throws IOException {

    // See if a file channel and lock can be obtained on the FileOutputStream
    FileChannel channel = null;
    FileLock lock = null;
    ByteBuffer bb = null;
    try {
        channel = fos.getChannel();
        if (channel != null) {
            lock = channel.tryLock();
        }
        bb = getTempByteBuffer();
    } catch (Throwable t) {
    }
    if (lock == null || bb == null || !bb.hasArray()) {
        releaseTempByteBuffer(bb);
        return false; // lock could not be set or bb does not have direct array access
    }

    try {

        // Read directly into the ByteBuffer array
        int bytesRead = is.read(bb.array());
        // Continue reading until no bytes are read and no
        // bytes are now available.
        while (bytesRead > 0 || is.available() > 0) {
            if (bytesRead > 0) {
                int written = 0;

                if (bytesRead < BUFFER_LEN) {
                    // If the ByteBuffer is not full, allocate a new one
                    ByteBuffer temp = ByteBuffer.allocate(bytesRead);
                    temp.put(bb.array(), 0, bytesRead);
                    temp.position(0);
                    written = channel.write(temp);
                } else {
                    // Write to channel
                    bb.position(0);
                    written = channel.write(bb);
                    bb.clear();
                }

            }

            // REVIEW: Do we need to ensure that bytesWritten is 
            // the same as the number of bytes sent ?

            bytesRead = is.read(bb.array());
        }
    } finally {
        // Release the lock
        lock.release();
        releaseTempByteBuffer(bb);
    }
    return true;
}

From source file:org.eclipse.orion.internal.server.servlets.xfer.ClientImport.java

/**
 * A put is used to send a chunk of a file.
 */
void doPut(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    int transferred = getTransferred();
    int length = getLength();
    int headerLength = Integer.valueOf(req.getHeader(ProtocolConstants.HEADER_CONTENT_LENGTH));
    String rangeString = req.getHeader(ProtocolConstants.HEADER_CONTENT_RANGE);
    if (rangeString == null)
        rangeString = "bytes 0-" + (length - 1) + '/' + length; //$NON-NLS-1$
    ContentRange range = ContentRange.parse(rangeString);
    if (length != range.getLength()) {
        fail(req, resp, "Chunk specifies an incorrect document length");
        return;
    }
    if (range.getStartByte() > transferred) {
        fail(req, resp, "Chunk missing; Expected start byte: " + transferred);
        return;
    }
    if (range.getEndByte() < range.getStartByte()) {
        fail(req, resp, "Invalid range: " + rangeString);
        return;
    }
    int chunkSize = 1 + range.getEndByte() - range.getStartByte();
    if (chunkSize != headerLength) {
        fail(req, resp, "Content-Range doesn't agree with Content-Length");
        return;
    }
    byte[] chunk = readChunk(req, chunkSize);
    FileOutputStream fout = null;
    try {
        fout = new FileOutputStream(new File(getStorageDirectory(), FILE_DATA), true);
        FileChannel channel = fout.getChannel();
        channel.position(range.getStartByte());
        channel.write(ByteBuffer.wrap(chunk));
        channel.close();
    } finally {
        try {
            if (fout != null)
                fout.close();
        } catch (IOException e) {
            //ignore secondary failure
        }
    }
    transferred = range.getEndByte() + 1;
    setTransferred(transferred);
    save();
    if (transferred >= length) {
        completeTransfer(req, resp);
        return;
    }
    resp.setStatus(308);//Resume Incomplete
    resp.setHeader("Range", "bytes 0-" + range.getEndByte()); //$NON-NLS-1$ //$NON-NLS-2$
    setResponseLocationHeader(req, resp);
}
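
Note that the position-then-write pair above can also be expressed with the positioned overload FileChannel.write(ByteBuffer src, long position), which writes at the given file offset without moving the channel's position. A sketch of the equivalent call, using the same variables as in the method above:

        // writes the chunk at the requested offset; the channel's
        // own position is left unchanged
        channel.write(ByteBuffer.wrap(chunk), range.getStartByte());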

From source file:siddur.solidtrust.fault.FaultController.java

@RequestMapping(value = "/upload", method = RequestMethod.POST)
public String handleFormUpload(@RequestParam("file") MultipartFile file, @RequestParam("version") int v,
        Model model, HttpSession session) throws Exception {

    IFaultPersister persister = getPersister(v);

    //upload
    log4j.info("Start uploading file: " + file.getName() + " with size: " + file.getSize());
    File temp = File.createTempFile("data", ".csv");
    log4j.info("Will save to " + temp.getAbsolutePath());

    InputStream in = null;
    FileOutputStream fout = null;

    try {
        fout = new FileOutputStream(temp);
        FileChannel fcout = fout.getChannel();

        in = file.getInputStream();
        ReadableByteChannel cin = Channels.newChannel(in);

        ByteBuffer buf = ByteBuffer.allocate(1024 * 8);
        while (true) {
            buf.clear();

            int r = cin.read(buf);

            if (r == -1) {
                break;
            }

            buf.flip();
            fcout.write(buf);
        }
    } finally {
        if (in != null) {
            in.close();
        }
        if (fout != null) {
            fout.close();
        }
    }
    log4j.info("Uploading complete");

    //fields
    BufferedReader br = null;
    int[] orders;
    try {
        in = new FileInputStream(temp);
        br = new BufferedReader(new InputStreamReader(in));

        //first line for fields
        String firstLine = br.readLine();
        orders = persister.validateTitle(firstLine);

        //persist
        persister.parseAndSave(br, orders, persister);
    } finally {
        if (br != null) {
            br.close();
        }
    }

    return "redirect:upload.html";
}

From source file:siddur.solidtrust.newprice2.Newprice2Controller.java

@RequestMapping(value = "/upload", method = RequestMethod.POST)
public String handleFormUpload(@RequestParam("file") MultipartFile file, Model model, HttpSession session)
        throws IOException {

    //upload
    log4j.info("Start uploading file: " + file.getName() + " with size: " + file.getSize());
    File temp = File.createTempFile("data", ".csv");
    log4j.info("Will save to " + temp.getAbsolutePath());

    InputStream in = null;
    FileOutputStream fout = null;

    try {
        fout = new FileOutputStream(temp);
        FileChannel fcout = fout.getChannel();

        in = file.getInputStream();
        ReadableByteChannel cin = Channels.newChannel(in);

        ByteBuffer buf = ByteBuffer.allocate(1024 * 8);
        while (true) {
            buf.clear();

            int r = cin.read(buf);

            if (r == -1) {
                break;
            }

            buf.flip();
            fcout.write(buf);
        }
    } finally {
        if (in != null) {
            in.close();
        }
        if (fout != null) {
            fout.close();
        }
    }
    FileStatus fs = new FileStatus();
    fs.setFile(temp);
    log4j.info("Uploading complete");

    //fields
    BufferedReader br = null;
    int[] orders;
    String[] fields;
    try {
        in = new FileInputStream(temp);
        br = new BufferedReader(new InputStreamReader(in));

        //first line for fields
        String firstLine = br.readLine();
        fields = StringUtils.split(firstLine, ";");
        orders = new int[fields.length];
        for (int i = 0; i < orders.length; i++) {
            orders[i] = ArrayUtils.indexOf(FIELDS, fields[i].trim());
        }

        //count
        while (br.readLine() != null) {
            fs.next();
        }
    } finally {
        if (br != null) {
            br.close();
        }
    }

    fs.flip();
    log4j.info("Total rows: " + fs.getTotalRow());

    //persist
    carService.saveCars(fs, orders, carService);
    return "redirect:/v2/upload.html";
}

From source file:voldemort.store.cachestore.impl.ChannelStore.java

private boolean checkSignature(FileChannel channel) throws IOException {
    ByteBuffer intBytes = ByteBuffer.allocate(OFFSET);
    if (channel.size() == 0) {
        intBytes.putInt(MAGIC);
        intBytes.flip();
        channel.write(intBytes);
    } else {
        channel.read(intBytes);
        intBytes.rewind();
        int s = intBytes.getInt();
        if (s != MAGIC)
            throw new StoreException(
                    "Header mismatch expect " + Integer.toHexString(MAGIC) + " read " + Integer.toHexString(s));
    }
    return true;
}

From source file:siddur.solidtrust.classic.ClassicController.java

@RequestMapping(value = "/upload", method = RequestMethod.POST)
public String handleFormUpload(@RequestParam("file") MultipartFile file, Model model, HttpSession session)
        throws Exception {

    //upload
    log4j.info("Start uploading file: " + file.getName() + " with size: " + file.getSize());
    File temp = File.createTempFile("data", ".csv");
    log4j.info("Will save to " + temp.getAbsolutePath());

    InputStream in = null;
    FileOutputStream fout = null;

    try {
        fout = new FileOutputStream(temp);
        FileChannel fcout = fout.getChannel();

        in = file.getInputStream();
        ReadableByteChannel cin = Channels.newChannel(in);

        ByteBuffer buf = ByteBuffer.allocate(1024 * 8);
        while (true) {
            buf.clear();

            int r = cin.read(buf);

            if (r == -1) {
                break;
            }

            buf.flip();
            fcout.write(buf);
        }
    } finally {
        if (in != null) {
            in.close();
        }
        if (fout != null) {
            fout.close();
        }
    }
    log4j.info("Uploading complete");

    //fields
    BufferedReader br = null;
    int[] orders;
    try {
        in = new FileInputStream(temp);
        br = new BufferedReader(new InputStreamReader(in));

        //first line for fields
        String firstLine = br.readLine();
        orders = persister.validateTitle(firstLine);

        //persist
        persister.parseAndSave(br, orders, persister);
    } finally {
        if (br != null) {
            br.close();
        }
    }

    return "redirect:upload.html";
}