Example usage for org.apache.commons.io FileUtils byteCountToDisplaySize

Introduction

On this page you can find example usages of org.apache.commons.io FileUtils byteCountToDisplaySize.

Prototype

public static String byteCountToDisplaySize(long size) 

Document

Returns a human-readable version of the file size, where the input represents a specific number of bytes.
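
For a quick feel of the output, here is a minimal, self-contained sketch. The expected strings in the comments reflect Commons IO's truncating (not rounding) behavior, so 1535 bytes still displays as "1 KB":

import org.apache.commons.io.FileUtils;

public class ByteCountDemo {
    public static void main(String[] args) {
        // Values below one kilobyte are reported in bytes.
        System.out.println(FileUtils.byteCountToDisplaySize(512L));           // 512 bytes
        // The count is divided and truncated, never rounded up.
        System.out.println(FileUtils.byteCountToDisplaySize(1535L));          // 1 KB
        System.out.println(FileUtils.byteCountToDisplaySize(1024L * 1024));   // 1 MB
        System.out.println(FileUtils.byteCountToDisplaySize(3L * 1024 * 1024 * 1024)); // 3 GB
    }
}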

Usage

From source file:it.anyplace.sync.bep.BlockPuller.java

public FileDownloadObserver pullBlocks(FileBlocks fileBlocks) throws InterruptedException {
    logger.info("pulling file = {}", fileBlocks);
    checkArgument(connectionHandler.hasFolder(fileBlocks.getFolder()),
            "supplied connection handler %s will not share folder %s", connectionHandler,
            fileBlocks.getFolder());
    final Object lock = new Object();
    final AtomicReference<Exception> error = new AtomicReference<>();
    final Object listener = new Object() {
        @Subscribe
        public void handleResponseMessageReceivedEvent(ResponseMessageReceivedEvent event) {
            synchronized (lock) {
                try {
                    if (!requestIds.contains(event.getMessage().getId())) {
                        return;
                    }
                    checkArgument(equal(event.getMessage().getCode(), ErrorCode.NO_ERROR),
                            "received error response, code = %s", event.getMessage().getCode());
                    byte[] data = event.getMessage().getData().toByteArray();
                    String hash = BaseEncoding.base16().encode(Hashing.sha256().hashBytes(data).asBytes());
                    blockCache.pushBlock(data);
                    if (missingHashes.remove(hash)) {
                        blocksByHash.put(hash, data);
                        logger.debug("aquired block, hash = {}", hash);
                        lock.notify();
                    } else {
                        logger.warn("received not-needed block, hash = {}", hash);
                    }
                } catch (Exception ex) {
                    error.set(ex);
                    lock.notify();
                }
            }
        }
    };
    FileDownloadObserver fileDownloadObserver = new FileDownloadObserver() {

        private long getReceivedData() {
            // Cast before multiplying so the arithmetic is done in long, avoiding int overflow.
            return (long) blocksByHash.size() * BLOCK_SIZE;
        }

        private long getTotalData() {
            return ((long) blocksByHash.size() + missingHashes.size()) * BLOCK_SIZE;
        }

        @Override
        public double getProgress() {
            return isCompleted() ? 1d : getReceivedData() / ((double) getTotalData());
        }

        @Override
        public String getProgressMessage() {
            return (Math.round(getProgress() * 1000d) / 10d) + "% "
                    + FileUtils.byteCountToDisplaySize(getReceivedData()) + " / "
                    + FileUtils.byteCountToDisplaySize(getTotalData());
        }

        @Override
        public boolean isCompleted() {
            return missingHashes.isEmpty();
        }

        @Override
        public void checkError() {
            if (error.get() != null) {
                throw new RuntimeException(error.get());
            }
        }

        @Override
        public double waitForProgressUpdate() throws InterruptedException {
            if (!isCompleted()) {
                synchronized (lock) {
                    checkError();
                    lock.wait();
                    checkError();
                }
            }
            return getProgress();
        }

        @Override
        public InputStream getInputStream() {
            checkArgument(missingHashes.isEmpty(), "pull failed, some blocks are still missing");
            List<byte[]> blockList = Lists
                    .newArrayList(Lists.transform(hashList, Functions.forMap(blocksByHash)));
            return new SequenceInputStream(Collections
                    .enumeration(Lists.transform(blockList, new Function<byte[], ByteArrayInputStream>() {
                        @Override
                        public ByteArrayInputStream apply(byte[] data) {
                            return new ByteArrayInputStream(data);
                        }
                    })));
        }

        @Override
        public void close() {
            missingHashes.clear();
            hashList.clear();
            blocksByHash.clear();
            try {
                connectionHandler.getEventBus().unregister(listener);
            } catch (Exception ex) {
                // best effort: the listener may never have been registered
            }
            if (closeConnection) {
                connectionHandler.close();
            }
        }
    };
    try {
        synchronized (lock) {
            hashList.addAll(Lists.transform(fileBlocks.getBlocks(), new Function<BlockInfo, String>() {
                @Override
                public String apply(BlockInfo block) {
                    return block.getHash();
                }
            }));
            missingHashes.addAll(hashList);
            // Remove through the iterator: calling missingHashes.remove() inside a
            // for-each loop would throw ConcurrentModificationException.
            for (Iterator<String> it = missingHashes.iterator(); it.hasNext();) {
                String hash = it.next();
                byte[] block = blockCache.pullBlock(hash);
                if (block != null) {
                    blocksByHash.put(hash, block);
                    it.remove();
                }
            }
            connectionHandler.getEventBus().register(listener);
            for (BlockInfo block : fileBlocks.getBlocks()) {
                if (missingHashes.contains(block.getHash())) {
                    // nextInt(bound) yields a uniformly distributed non-negative id.
                    int requestId = new Random().nextInt(Integer.MAX_VALUE);
                    requestIds.add(requestId);
                    connectionHandler.sendMessage(Request.newBuilder().setId(requestId)
                            .setFolder(fileBlocks.getFolder()).setName(fileBlocks.getPath())
                            .setOffset(block.getOffset()).setSize(block.getSize())
                            .setHash(ByteString.copyFrom(BaseEncoding.base16().decode(block.getHash())))
                            .build());
                    logger.debug("sent request for block, hash = {}", block.getHash());
                }
            }
            return fileDownloadObserver;
        }
    } catch (Exception ex) {
        fileDownloadObserver.close();
        throw ex;
    }
}

From source file:com.talis.entity.db.EntityDatabasePerfTestBase.java

private void testClearingSingleGraph(int graphs, int stmtsPerGraph) throws EntityDatabaseException {

    int stmtsPerSubject = 20;
    int subjectCount = stmtsPerGraph / stmtsPerSubject;
    long start = System.currentTimeMillis();
    db.begin();

    int quadCount = 0;

    for (int i = 0; i < graphs; i++) {
        Node thisGraph = Node.createURI(graph.getURI() + "/" + i);
        for (int j = 0; j < subjectCount; j++) {
            Node thisSubject = Node.createURI(subject.getURI() + "/" + i + "/" + j);
            db.put(thisSubject, thisGraph, getQuads(thisGraph, thisSubject, stmtsPerSubject));
            quadCount += stmtsPerSubject;
        }
    }
    db.commit();
    System.out.println(String.format("Populated %s graphs (%s total statements) in %s ms", graphs, quadCount,
            (System.currentTimeMillis() - start)));

    int iter = graphs / 4;
    Set<Node> deleted = new HashSet<Node>();
    Random r = new Random();
    start = System.currentTimeMillis();
    while (deleted.size() < iter) {
        Node thisGraph = Node.createURI(graph.getURI() + "/" + r.nextInt(graphs));
        if (!deleted.contains(thisGraph)) {
            db.deleteGraph(thisGraph);
            deleted.add(thisGraph);
        }
    }

    long end = System.currentTimeMillis();
    long duration = end - start;
    System.out.println(String.format("Iterations: %s, Total: %s, PerOp: %s", iter, duration,
            (double) ((double) duration / (double) iter)));
    long size = FileUtils.sizeOfDirectory(tmpDir.getRoot());
    System.out.println(String.format("Size on disk : %s (%s)", FileUtils.byteCountToDisplaySize(size), size));

}

From source file:com.linkedin.drelephant.tez.heuristics.MapperTimeHeuristic.java

public HeuristicResult apply(TezApplicationData data) {
    if (!data.getSucceeded()) {
        return null;
    }
    TezTaskData[] tasks = data.getMapTaskData();

    List<Long> inputSizes = new ArrayList<Long>();
    List<Long> runtimesMs = new ArrayList<Long>();
    long taskMinMs = Long.MAX_VALUE;
    long taskMaxMs = 0;

    for (TezTaskData task : tasks) {

        if (task.isSampled()) {
            long inputByte = 0;
            for (TezCounterData.CounterName counterName : _counterNames) {
                inputByte += task.getCounters().get(counterName);
            }
            inputSizes.add(inputByte);
            long taskTime = task.getTotalRunTimeMs();
            runtimesMs.add(taskTime);
            taskMinMs = Math.min(taskMinMs, taskTime);
            taskMaxMs = Math.max(taskMaxMs, taskTime);
        }
    }

    if (taskMinMs == Long.MAX_VALUE) {
        taskMinMs = 0;
    }

    long averageSize = Statistics.average(inputSizes);
    long averageTimeMs = Statistics.average(runtimesMs);

    Severity shortTaskSeverity = shortTaskSeverity(tasks.length, averageTimeMs);
    Severity longTaskSeverity = longTaskSeverity(tasks.length, averageTimeMs);
    Severity severity = Severity.max(shortTaskSeverity, longTaskSeverity);

    HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(),
            _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length));

    result.addResultDetail("Number of tasks", Integer.toString(tasks.length));
    result.addResultDetail("Average task input size", FileUtils.byteCountToDisplaySize(averageSize));
    result.addResultDetail("Average task runtime", Statistics.readableTimespan(averageTimeMs));
    result.addResultDetail("Max task runtime", Statistics.readableTimespan(taskMaxMs));
    result.addResultDetail("Min task runtime", Statistics.readableTimespan(taskMinMs));

    return result;
}

From source file:com.linkedin.drelephant.mapreduce.heuristics.GenericDataSkewHeuristic.java

@Override
public HeuristicResult apply(MapReduceApplicationData data) {

    if (!data.getSucceeded()) {
        return null;
    }

    MapReduceTaskData[] tasks = getTasks(data);

    //Gather data
    List<Long> inputBytes = new ArrayList<Long>();

    for (int i = 0; i < tasks.length; i++) {
        if (tasks[i].isSampled()) {
            inputBytes.add(tasks[i].getCounters().get(_counterName));
        }
    }

    // Ratio of total tasks / sampled tasks
    double scale = ((double) tasks.length) / inputBytes.size();
    // Analyze data. TODO: this is a temporary fix; findTwoGroups should accept a List as input.
    long[][] groups = Statistics.findTwoGroups(Longs.toArray(inputBytes));

    long avg1 = Statistics.average(groups[0]);
    long avg2 = Statistics.average(groups[1]);

    long min = Math.min(avg1, avg2);
    long diff = Math.abs(avg2 - avg1);

    Severity severity = getDeviationSeverity(min, diff);

    //This reduces severity if the largest file sizes are insignificant
    severity = Severity.min(severity, getFilesSeverity(avg2));

    //This reduces severity if number of tasks is insignificant
    severity = Severity.min(severity, Severity.getSeverityAscending(groups[0].length, numTasksLimits[0],
            numTasksLimits[1], numTasksLimits[2], numTasksLimits[3]));

    HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(),
            _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length));

    result.addResultDetail("Number of tasks", Integer.toString(tasks.length));
    result.addResultDetail("Group A",
            groups[0].length + " tasks @ " + FileUtils.byteCountToDisplaySize(avg1) + " avg");
    result.addResultDetail("Group B",
            groups[1].length + " tasks @ " + FileUtils.byteCountToDisplaySize(avg2) + " avg");

    return result;
}

From source file:de.berlios.jhelpdesk.model.AdditionalFile.java

/**
 * Returns the file size formatted in a human-readable way.
 *
 * @return the file size formatted in a human-readable way
 *
 * @see FileUtils#byteCountToDisplaySize(long)
 */
public String getHumanReadableFileSize() {
    return FileUtils.byteCountToDisplaySize(getFileSize());
}
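
A hypothetical usage sketch, for illustration only (the setter below is an assumption; only getFileSize() appears in the source above):

AdditionalFile attachment = new AdditionalFile();
attachment.setFileSize(2_621_440L); // 2.5 MB in bytes
System.out.println(attachment.getHumanReadableFileSize()); // prints "2 MB" (truncated)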

From source file:com.netflix.genie.web.resources.writers.DefaultDirectoryWriter.java

private void writeFileHtml(final StringBuilder builder, final boolean shade, final Entry entry,
        final boolean isDirectory) {
    builder.append("<tr");
    if (shade) {
        builder.append(" bgcolor=\"#eeeeee\"");
    }
    builder.append(">");

    builder.append("<td align=\"left\">&nbsp;&nbsp;");
    builder.append("<a href=\"").append(entry.getUrl()).append("\">");
    builder.append("<tt>").append(entry.getName()).append("</tt></a></td>");
    builder.append("<td align=\"right\"><tt>");
    if (isDirectory) {
        builder.append("-");
    } else {
        builder.append(FileUtils.byteCountToDisplaySize(entry.getSize()));
    }
    builder.append("</tt></td>");
    final String lastModified = ConcurrentDateFormat.formatRfc1123(entry.getLastModified());
    builder.append("<td align=\"right\"><tt>").append(lastModified).append("</tt></td>");
    builder.append("</tr>");
}

From source file:com.adobe.acs.commons.httpcache.store.caffeine.impl.CaffeineMemHttpCacheStoreImpl.java

@Override
protected void addCacheData(Map<String, Object> data, MemCachePersistenceObject cacheObj) {
    int hitCount = cacheObj.getHitCount();
    long size = cacheObj.getBytes().length;
    data.put(AbstractCacheMBean.JMX_PN_STATUS, cacheObj.getStatus());
    data.put(AbstractCacheMBean.JMX_PN_SIZE, FileUtils.byteCountToDisplaySize(size));
    data.put(AbstractCacheMBean.JMX_PN_CONTENTTYPE, cacheObj.getContentType());
    data.put(AbstractCacheMBean.JMX_PN_CHARENCODING, cacheObj.getCharEncoding());
    data.put(AbstractCacheMBean.JMX_PN_HITS, hitCount);
    data.put(AbstractCacheMBean.JMX_PN_TOTALSIZESERVED, FileUtils.byteCountToDisplaySize(hitCount * size));
}

From source file:com.talis.entity.db.EntityDatabasePerfTestBase.java

@Test
public void benchmarkRoundTripping() throws Exception {
    System.out.println("Round trip quads");
    int iter = 10000;
    db.begin();
    long start = System.currentTimeMillis();
    for (int i = 0; i < iter; i++) {
        db.put(subject, graph, quads);
        db.get(subject);
    }
    db.commit();
    long end = System.currentTimeMillis();
    long duration = end - start;

    System.out.println(String.format("Iterations: %s, Total: %s, PerOp: %s", iter, duration,
            (double) ((double) duration / (double) iter)));
    long size = FileUtils.sizeOfDirectory(tmpDir.getRoot());
    System.out.println(String.format("Size on disk : %s (%s)", FileUtils.byteCountToDisplaySize(size), size));

}

From source file:net.sf.jabb.web.action.VfsTreeAction.java

/**
 * Transforms a FileObject into a JsTreeNodeData.
 * @param file the file whose information will be encapsulated in the node data structure.
 * @param noChild whether the node is a leaf and should carry no child state.
 * @param relativePath the relative path used as the node's "id" attribute.
 * @return the node data structure which represents the file.
 * @throws FileSystemException
 */
protected JsTreeNodeData populateTreeNodeData(FileObject file, boolean noChild, String relativePath)
        throws FileSystemException {
    JsTreeNodeData node = new JsTreeNodeData();

    String baseName = file.getName().getBaseName();
    FileContent content = file.getContent();
    FileType type = file.getType();

    node.setData(baseName);

    Map<String, Object> attr = new HashMap<String, Object>();
    node.setAttr(attr);
    attr.put("id", relativePath);
    attr.put("rel", type.getName());
    attr.put("fileType", type.getName());
    if (content != null) {
        long fileLastModifiedTime = file.getContent().getLastModifiedTime();
        attr.put("fileLastModifiedTime", fileLastModifiedTime);
        attr.put("fileLastModifiedTimeForDisplay",
                DateFormat.getDateTimeInstance().format(new Date(fileLastModifiedTime)));
        if (file.getType() != FileType.FOLDER) {
            attr.put("fileSize", content.getSize());
            attr.put("fileSizeForDisplay", FileUtils.byteCountToDisplaySize(content.getSize()));
        }
    }

    // these fields should not appear in JSON for leaf nodes
    if (!noChild) {
        node.setState(JsTreeNodeData.STATE_CLOSED);
    }
    return node;
}
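
A hypothetical call site, for illustration only (the URI is an assumption, and VFS.getManager() comes from Commons VFS and may itself throw FileSystemException, which the enclosing method already declares):

FileObject root = VFS.getManager().resolveFile("file:///var/log");
JsTreeNodeData rootNode = populateTreeNodeData(root, false, "/");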

From source file:net.arp7.HdfsPerfTest.WriteFile.java

static private void writeStats(final FileIoStats stats) {
    LOG.info("Total files written: " + stats.getFilesWritten());
    LOG.info("Total data written: " + FileUtils.byteCountToDisplaySize(stats.getBytesWritten()));
    LOG.info("Mean Time to create each file on NN: " + String.format("%.2f", stats.getMeanCreateTimeMs())
            + " ms");
    LOG.info("Mean Time to write each file: " + String.format("%.2f", stats.getMeanWriteTimeMs()) + " ms");
    LOG.info("Mean Time to close each file: " + String.format("%.2f", stats.getMeanCloseTimeMs()) + " ms");
    LOG.info("Total elapsed time: " + Utils.formatNumber(stats.getElapsedTimeMs()) + " ms");
    long throughput = 0;
    if (stats.getElapsedTimeMs() > 0) {
        throughput = (params.getNumFiles() * params.getFileSize()) / stats.getElapsedTimeMs();
    }
    LOG.info("Aggregate throughput: " + Utils.formatNumber(throughput) + " KBps");
}