Example usage for org.apache.commons.io.input CountingInputStream getByteCount

List of usage examples for org.apache.commons.io.input CountingInputStream getByteCount

Introduction

On this page you can find example usage for org.apache.commons.io.input CountingInputStream getByteCount.

Prototype

public synchronized long getByteCount() 

Source Link

Document

The number of bytes that have passed through this stream.

Usage

From source file:org.exoplatform.document.util.FileUtils.java

/**
 * Returns the size of the specified file or directory. If the provided File
 * is a regular file, then the file's length is returned. If the argument is
 * a directory, then the size of the directory is calculated recursively. If
 * a directory or subdirectory is security restricted, its size will not be
 * included./*from   ww w  .jav  a 2  s.  c o  m*/
 * 
 * @param inputStream
 *            - the input stream
 * @param fileName
 *            - the file's name
 * 
 * @return the length of the file, or recursive size of the directory,
 *         provided (in bytes).
 * @throws NullPointerException
 *             - if the file is null
 * @throws IllegalArgumentException
 *             - if the file does not exist.
 */
public static long sizeOf(InputStream inputStream, String fileName) throws FileException {
    FileOutputStream fileOutputStream = null;
    CountingInputStream countingInputStream = null;

    long sizeOfFile = 0;
    try {
        fileName = FileNameUtils.getName(fileName);
        fileOutputStream = new FileOutputStream(new File(FilePathUtils.ROOT_PATH + fileName));
        countingInputStream = new CountingInputStream(inputStream);

        IOUtils.copyLarge(countingInputStream, fileOutputStream);
        sizeOfFile = countingInputStream.getByteCount();
    } catch (Exception ex) {
        sizeOfFile = 0;
    } finally {
        IOUtils.closeQuietly(countingInputStream);
        IOUtils.closeQuietly(fileOutputStream);
    }

    return sizeOfFile;
}

From source file:org.hardisonbrewing.s3j.FileResponseHandler.java

/**
 * Streams the HTTP response body into a new temporary file and verifies
 * that the number of bytes received matches the Content-Length header.
 *
 * The raw entity stream is wrapped, in order, by a progress reporter, a
 * byte counter and (when a cipher is configured) a decrypting stream, so
 * the counter observes the on-the-wire (encrypted) length — which is what
 * Content-Length describes.
 *
 * @param httpResponse the response to consume
 * @return the temp file containing the (decrypted) body
 * @throws HttpResponseException if the response code is not acceptable
 * @throws IOException if the download fails or the received length does
 *             not match the Content-Length header
 */
@Override
public File handleResponse(HttpResponse httpResponse) throws HttpResponseException, IOException {

    System.out.println("  Response Headers");
    HttpUtil.printHeaders(httpResponse);

    HttpUtil.validateResponseCode(httpResponse);

    File file = FileUtils.createTempFile();

    try {
        long contentLength = 0;

        HttpEntity entity = null;
        InputStream inputStream = null;
        CountingInputStream countingInputStream = null;
        OutputStream outputStream = null;

        try {
            entity = httpResponse.getEntity();

            contentLength = entity.getContentLength();
            validateContentLength(contentLength);

            inputStream = entity.getContent();

            inputStream = new ProgressInputStream(inputStream, contentLength);

            // put this before the cipher so we get the encrypted length
            countingInputStream = new CountingInputStream(inputStream);
            inputStream = countingInputStream;

            if (cipher != null) {
                inputStream = new CipherInputStream(inputStream, cipher);
            }

            outputStream = new FileOutputStream(file);

            IOUtil.copy(inputStream, outputStream);
        } finally {
            IOUtil.close(inputStream);
            EntityUtils.consume(entity);
            IOUtil.close(outputStream);
        }

        long readLength = countingInputStream.getByteCount();
        validateDownloadLength(contentLength, readLength);

        return file;
    } catch (IOException | RuntimeException e) {
        // FIX: do not leak the partially-written temp file when the
        // download or the length validation fails.
        file.delete();
        throw e;
    }
}

From source file:org.jcodec.codecs.mjpeg.JpegParser.java

/**
 * Parses a JPEG stream one marker segment at a time until EOI (or EOF) is
 * reached, populating a {@link CodedImage} with the frame header, Huffman
 * and quantization tables, the scan header and the entropy-coded data.
 *
 * @param is the stream positioned at (or inside) a JPEG image
 * @param counter byte counter over the same stream; used only to report
 *            the offset of a malformed marker in error messages
 * @return the (possibly partially) parsed image; returned early on EOF
 * @throws IOException on read failure
 */
public CodedImage parse(PushbackInputStream is, CountingInputStream counter) throws IOException {
    CodedImage image = new CodedImage();
    // Number of DQT tables seen so far: first is luma, any later one chroma.
    int curQTable = 0;
    while (true) {
        int marker = is.read();
        if (marker == -1)
            return image; // EOF without EOI: return what was parsed
        if (marker == 0)
            continue; // skip stuffing zero bytes between segments
        if (marker != 0xFF)
            throw new RuntimeException("@" + Long.toHexString(counter.getByteCount()) + " Marker expected: 0x"
                    + Integer.toHexString(marker));

        // Second byte after 0xFF identifies the marker type.
        int b = is.read();
        Debug.trace("%s", Markers.toString(b));
        switch (b) {
        case Markers.SOF0:
            // Baseline DCT frame header.
            image.frame = FrameHeader.read(is);
            Debug.trace("    %s", image.frame);
            break;
        case Markers.DHT:
            // Define Huffman Table(s). The segment length includes its own
            // two length bytes, hence the "len1 - 2" bound.
            int len1 = readShort(is);
            CountingInputStream cis = new CountingInputStream(is);
            while (cis.getCount() < len1 - 2) {
                readHuffmanTable(cis, image);
            }
            break;
        case Markers.DQT:
            // Define Quantization Table(s); same length convention as DHT.
            int len4 = readShort(is);
            CountingInputStream cis1 = new CountingInputStream(is);
            while (cis1.getCount() < len4 - 2) {
                QuantTable quantTable = readQuantTable(cis1);
                if (curQTable == 0)
                    image.setQuantLum(quantTable);
                else
                    image.setQuantChrom(quantTable);
                curQTable++;
            }
            break;
        case Markers.SOS:
            // Start Of Scan: only single-scan images are supported.
            if (image.scan != null) {
                throw new IllegalStateException("unhandled - more than one scan header");
            }
            image.scan = ScanHeader.read(is);
            Debug.trace("    %s", image.scan);
            image.setData(readData(is));
            break;
        case Markers.SOI:
            // Start Of Image: no payload.
            break;
        case Markers.EOI:
            // End Of Image: parsing complete.
            return image;
        case Markers.APP0:
            // JFIF header parsing is disabled; APP0 intentionally falls
            // through and is skipped like every other APPn segment.
            // int len10 = readShort(is);
            // byte[] id = new byte[4];
            // is.read(id);
            // if (!Arrays.equals(JFIF, id))
            // throw new RuntimeException("Not a JFIF file");
            // is.skip(1);
            //
            // is.skip(2);
            // int units = is.read();
            // int dx = readShort(is);
            // int dy = readShort(is);
            // int tx = is.read();
            // int ty = is.read();
            // is.skip(tx * ty * 3);
            // break;
        case Markers.APP1:
        case Markers.APP2:
        case Markers.APP3:
        case Markers.APP4:
        case Markers.APP5:
        case Markers.APP6:
        case Markers.APP7:
        case Markers.APP8:
        case Markers.APP9:
        case Markers.APPA:
        case Markers.APPB:
        case Markers.APPC:
        case Markers.APPD:
        case Markers.APPE:
        case Markers.APPF:
            // Application segments carry no decode-relevant data here: skip.
            int len3 = readShort(is);
            StringReader.sureSkip(is, len3 - 2);
            break;
        case Markers.DRI:
            /*
             * Lr: Define restart interval segment length — specifies the
             * length of the parameters in the DRI segment shown in Figure
             * B.9 (see B.1.1.4).
             */
            int lr = readShort(is);
            // Ri: Restart interval — specifies the number of MCUs in the
            // restart interval.
            int ri = readShort(is);
            Debug.trace("DRI Lr: %d Ri: %d", lr, ri);
            // A DRI marker segment with Ri equal to zero shall disable
            // restart intervals for the following scans.
            // NOTE(review): non-zero restart intervals are unsupported and
            // fail this assertion.
            Asserts.assertEquals(0, ri);
            break;
        default: {
            throw new IllegalStateException("unhandled marker " + Markers.toString(b));
        }
        }
    }
}

From source file:org.sead.repositories.reference.RefRepository.java

/**
 * Reads and parses the JSON object for {@code item} from the ORE stream,
 * using the index file to locate its byte offset and to estimate its
 * length from the gap to the next indexed entry.
 *
 * On the first call (when {@code entries}/{@code offsets} are null) the
 * index file is parsed into parallel name/offset lists; recursive calls
 * (via getChildren) reuse those lists.
 *
 * @param item identifier of the entry to read
 * @param indexFile JSON index mapping entry ids to byte offsets
 * @param cis the ORE stream, currently positioned at {@code curOffset}
 * @param withChildren whether to recursively inline "Has Part" children
 * @param oreFileSize total ORE file size; sizes the last indexed entry
 * @param curOffset current absolute position of {@code cis}
 * @param entries entry ids from the index, or null to (re)load the index
 * @param offsets byte offsets parallel to {@code entries}, or null
 * @return the parsed node, or null if the item is not in the index or its
 *         bytes could not be read completely
 */
private JsonNode getItem(String item, File indexFile, CountingInputStream cis, boolean withChildren,
        Long oreFileSize, long curOffset, ArrayList<String> entries, ArrayList<Long> offsets)
        throws JsonParseException, JsonMappingException, IOException {
    log.trace("Getting: " + item + " with starting offset: " + curOffset);

    long curPos = curOffset;

    if ((entries == null) || (offsets == null)) {
        entries = new ArrayList<String>();
        offsets = new ArrayList<Long>();

        FileInputStream fis = new FileInputStream(indexFile);
        try {
            JsonFactory f = new MappingJsonFactory();
            JsonParser jp = f.createParser(fis);

            JsonToken current;
            log.trace("Reading Index file");
            current = jp.nextToken(); // Start object

            // The index is a flat object: { "<id>": <offset>, ... }
            while ((current = jp.nextToken()) != null) {
                if (current.equals(JsonToken.FIELD_NAME)) {
                    String fName = jp.getText();
                    current = jp.nextToken(); // advance to the offset value
                    long offset = jp.getLongValue();
                    log.trace("Adding: " + fName + " : " + offset);
                    entries.add(fName);
                    offsets.add(offset);
                }
            }
        } finally {
            // FIX: close the index stream even if parsing throws.
            try {
                fis.close();
            } catch (Exception e) {
                log.debug(e.getMessage());
            }
        }
    }

    int index = entries.indexOf(item);
    if (index == -1) {
        // FIX: previously execution continued after the warning and
        // crashed with IndexOutOfBoundsException on offsets.get(-1).
        log.warn(item + " not in index");
        return null;
    }

    // Estimate the entry's size from the gap to the next offset; the last
    // entry runs to the end of the ORE file.
    int estSize;
    if (index < offsets.size() - 1) {
        estSize = (int) (offsets.get(index + 1) - offsets.get(index));
    } else {
        estSize = (int) (oreFileSize - offsets.get(index));
    }

    curPos += skipTo(cis, curPos, offsets.get(index));
    log.trace("Current Pos updated to : " + curPos);

    // FIX: InputStream.read may return fewer bytes than requested; loop
    // until the buffer is full or the stream ends.
    byte[] b = new byte[estSize];
    int bytesRead = 0;
    int r;
    while (bytesRead < estSize && (r = cis.read(b, bytesRead, estSize - bytesRead)) != -1) {
        bytesRead += r;
    }
    log.trace("Read " + bytesRead + " bytes");

    if (bytesRead == estSize) {
        log.trace("Read: " + new String(b));
        InputStream is = new ByteArrayInputStream(b);
        // mapper seems to be OK ignoring a last char such as a comma after
        // the object/tree
        ObjectNode resultNode = (ObjectNode) mapper.readTree(is);
        try {
            is.close();
        } catch (Exception e) {
            log.debug(e.getMessage());
        }

        curPos += bytesRead;
        log.trace("curPos: " + curPos + " : count: " + cis.getByteCount());

        log.trace(resultNode.toString());
        if ((resultNode.has("Has Part")) && withChildren) {
            resultNode = getChildren(resultNode, indexFile, cis, oreFileSize, curPos, entries, offsets);
        } else {
            resultNode.remove("aggregates");
        }
        return resultNode;
    } else {
        return null;
    }
}

From source file:org.sead.repositories.reference.RefRepository.java

/**
 * Resolves the "Has Part" references of {@code resultNode} by reading each
 * referenced child from the ORE stream (without recursing further) and
 * attaches them as an "aggregates" array on the node.
 *
 * @param resultNode the parent node whose children are to be inlined
 * @param indexFile JSON index mapping entry ids to byte offsets
 * @param cis the ORE stream; its byte count tracks the current position
 * @param oreFileSize total ORE file size, forwarded to getItem
 * @param curPos current absolute position of {@code cis}
 * @param entries entry ids from the index
 * @param offsets byte offsets parallel to {@code entries}
 * @return {@code resultNode} with the "aggregates" array set
 */
private ObjectNode getChildren(ObjectNode resultNode, File indexFile, CountingInputStream cis, Long oreFileSize,
        long curPos, ArrayList<String> entries, ArrayList<Long> offsets)
        throws JsonParseException, JsonMappingException, IOException {

    // Collect the child identifiers: "Has Part" may be one id or an array.
    ArrayList<String> partIds = new ArrayList<String>();
    JsonNode hasPart = resultNode.get("Has Part");
    if (hasPart.isArray()) {
        for (JsonNode part : hasPart) {
            partIds.add(part.textValue());
        }
    } else {
        System.out.println("Has Part not an array");
        partIds.add(hasPart.textValue());
    }

    // Read each child in index order, advancing our position to wherever
    // the counting stream ended up after each read.
    ArrayNode aggregates = mapper.createArrayNode();
    for (String partId : partIds) {
        aggregates.add(getItem(partId, indexFile, cis, false, oreFileSize, curPos, entries, offsets));
        curPos = cis.getByteCount();
        log.trace("curPos updated to " + curPos + " after reading: " + partId);

    }
    log.trace("Child Ids: " + partIds.toString());
    resultNode.set("aggregates", aggregates);
    return resultNode;

}

From source file:password.pwm.util.localdb.LocalDBUtility.java

/**
 * Imports a LocalDB export (a gzipped CSV of {@code db,key,value} records)
 * into the local database, batching writes into transactions whose size is
 * tuned at runtime and reporting progress every 30 seconds.
 *
 * @param inputStream the gzipped CSV import source; closed when done
 * @param out sink for human-readable progress/status messages
 * @param totalBytes size of the import source in bytes; {@code <= 0}
 *            disables percentage-based progress output
 * @throws PwmOperationalException NOTE(review): presumably propagated from
 *             prepareForImport/markImportComplete — confirm
 * @throws IOException on read or CSV parse failure
 */
private void importLocalDB(final InputStream inputStream, final Appendable out, final long totalBytes)
        throws PwmOperationalException, IOException {
    this.prepareForImport();

    importLineCounter = 0;
    if (totalBytes > 0) {
        writeStringToOut(out, "total bytes in localdb import source: " + totalBytes);
    }

    writeStringToOut(out, "beginning localdb import...");

    final Instant startTime = Instant.now();
    // Dynamically sizes write batches, aiming for ~100ms per transaction.
    final TransactionSizeCalculator transactionCalculator = new TransactionSizeCalculator(
            new TransactionSizeCalculator.SettingsBuilder()
                    .setDurationGoal(new TimeDuration(100, TimeUnit.MILLISECONDS)).setMinTransactions(50)
                    .setMaxTransactions(5 * 1000).createSettings());

    // Per-DB staging area for records not yet flushed to the LocalDB.
    final Map<LocalDB.DB, Map<String, String>> transactionMap = new HashMap<>();
    for (final LocalDB.DB loopDB : LocalDB.DB.values()) {
        transactionMap.put(loopDB, new TreeMap<>());
    }

    // Counts compressed bytes consumed, so progress compares to totalBytes.
    final CountingInputStream countingInputStream = new CountingInputStream(inputStream);
    final EventRateMeter eventRateMeter = new EventRateMeter(TimeDuration.MINUTE);

    // Daemon timer that emits progress statistics every 30 seconds.
    final Timer statTimer = new Timer(true);
    statTimer.scheduleAtFixedRate(new TimerTask() {
        @Override
        public void run() {
            String output = "";
            if (totalBytes > 0) {
                final ProgressInfo progressInfo = new ProgressInfo(startTime, totalBytes,
                        countingInputStream.getByteCount());
                output += progressInfo.debugOutput();
            } else {
                output += "recordsImported=" + importLineCounter;
            }
            output += ", avgTransactionSize=" + transactionCalculator.getTransactionSize()
                    + ", recordsPerMinute=" + eventRateMeter.readEventRate().setScale(2, BigDecimal.ROUND_DOWN);
            writeStringToOut(out, output);
        }
    }, 30 * 1000, 30 * 1000);

    Reader csvReader = null;
    try {
        csvReader = new InputStreamReader(new GZIPInputStream(countingInputStream, GZIP_BUFFER_SIZE),
                PwmConstants.DEFAULT_CHARSET);
        // Each CSV record is: db name, key, value.
        for (final CSVRecord record : PwmConstants.DEFAULT_CSV_FORMAT.parse(csvReader)) {
            importLineCounter++;
            eventRateMeter.markEvents(1);
            final String dbName_recordStr = record.get(0);
            final LocalDB.DB db = JavaHelper.readEnumFromString(LocalDB.DB.class, null, dbName_recordStr);
            final String key = record.get(1);
            final String value = record.get(2);
            if (db == null) {
                // Unknown DB name: skip the record but tell the operator.
                writeStringToOut(out, "ignoring localdb import record #" + importLineCounter
                        + ", invalid DB name '" + dbName_recordStr + "'");
            } else {
                transactionMap.get(db).put(key, value);
                // Flush all staged records once the combined batch reaches
                // the calculator's current target size.
                int cachedTransactions = 0;
                for (final LocalDB.DB loopDB : LocalDB.DB.values()) {
                    cachedTransactions += transactionMap.get(loopDB).size();
                }
                if (cachedTransactions >= transactionCalculator.getTransactionSize()) {
                    final long startTxnTime = System.currentTimeMillis();
                    for (final LocalDB.DB loopDB : LocalDB.DB.values()) {
                        localDB.putAll(loopDB, transactionMap.get(loopDB));
                        transactionMap.get(loopDB).clear();
                    }
                    // Feed the measured duration back into the batch sizer.
                    transactionCalculator.recordLastTransactionDuration(TimeDuration.fromCurrent(startTxnTime));
                }
            }
        }
    } finally {
        LOGGER.trace("import process completed");
        statTimer.cancel();
        IOUtils.closeQuietly(csvReader);
        IOUtils.closeQuietly(countingInputStream);
    }

    // Flush whatever is still staged after the last partial batch.
    for (final LocalDB.DB loopDB : LocalDB.DB.values()) {
        localDB.putAll(loopDB, transactionMap.get(loopDB));
        transactionMap.get(loopDB).clear();
    }

    this.markImportComplete();

    writeStringToOut(out, "restore complete, restored " + importLineCounter + " records in "
            + TimeDuration.fromCurrent(startTime).asLongString());
    // Redundant (already cancelled in the finally block above) but harmless.
    statTimer.cancel();
}