Example usage for org.apache.commons.io FileUtils byteCountToDisplaySize

Introduction

On this page you can find example usage of org.apache.commons.io.FileUtils.byteCountToDisplaySize.

Prototype

public static String byteCountToDisplaySize(long size) 
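
Commons IO 2.4 and later also provide an overload accepting a java.math.BigInteger; the spade.utility.CommonFunctions example below passes a BigInteger size to it.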

Document

Returns a human-readable version of the file size, where the input represents a specific number of bytes.
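
For example, a minimal sketch of the values this method produces (sizes are rounded down to the nearest whole unit):

FileUtils.byteCountToDisplaySize(128);                     // "128 bytes"
FileUtils.byteCountToDisplaySize(1024);                    // "1 KB"
FileUtils.byteCountToDisplaySize(3 * 1024 * 1024);         // "3 MB"
FileUtils.byteCountToDisplaySize(5L * 1024 * 1024 * 1024); // "5 GB"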

Usage

From source file:pl.nask.hsn2.service.ShellcodeTask.java

private File downloadFile(File targetDir) throws ResourceException, StorageException {
    InputStream is = null;
    FileOutputStream fos = null;
    try {
        long downloadTimeStart = System.currentTimeMillis();
        File tmpFile = File.createTempFile(
                jobContext.getJobId() + "-" + jobContext.getReqId() + "-" + fileId + "-", "", targetDir);
        is = jobContext.getFileAsInputStream(fileId);
        fos = new FileOutputStream(tmpFile);
        IOUtils.copy(is, fos);
        LOGGER.debug("Downloaded file (size={}) in {} ms", FileUtils.byteCountToDisplaySize(tmpFile.length()),
                System.currentTimeMillis() - downloadTimeStart);
        return tmpFile;
    } catch (IOException e) {
        throw new ResourceException("Cannot create temporary file", e);
    } finally {
        IOUtils.closeQuietly(is);
        IOUtils.closeQuietly(fos);
    }
}

From source file:pl.nask.hsn2.service.SwfTask.java

private File downloadSwfFile() throws ResourceException, StorageException {
    InputStream is = null;
    FileOutputStream fos = null;
    try {
        long downloadTimeStart = System.currentTimeMillis();
        File tmpFile = File.createTempFile(
                jobContext.getJobId() + "-" + jobContext.getReqId() + "-" + fileId + "-", ".swf");
        tmpFile.deleteOnExit();
        is = jobContext.getFileAsInputStream(fileId);
        fos = new FileOutputStream(tmpFile);
        IOUtils.copy(is, fos);
        LOGGER.debug("Downloaded file (size={}) in {} ms", FileUtils.byteCountToDisplaySize(tmpFile.length()),
                System.currentTimeMillis() - downloadTimeStart);
        return tmpFile;
    } catch (IOException e) {
        throw new ResourceException("Cannot create temporary file", e);
    } finally {
        IOUtils.closeQuietly(is);
        IOUtils.closeQuietly(fos);
    }
}
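
The two download examples above use the pre-Java-7 try/finally idiom with IOUtils.closeQuietly. On Java 7 and later the same logic can be written with try-with-resources; the following is a sketch under that assumption, reusing the jobContext, fileId, and LOGGER names from the examples:

private File downloadFile(File targetDir) throws ResourceException, StorageException {
    long downloadTimeStart = System.currentTimeMillis();
    try {
        File tmpFile = File.createTempFile(
                jobContext.getJobId() + "-" + jobContext.getReqId() + "-" + fileId + "-", "", targetDir);
        // Both streams are closed automatically, even if the copy throws.
        try (InputStream is = jobContext.getFileAsInputStream(fileId);
                OutputStream fos = new FileOutputStream(tmpFile)) {
            IOUtils.copy(is, fos);
        }
        LOGGER.debug("Downloaded file (size={}) in {} ms", FileUtils.byteCountToDisplaySize(tmpFile.length()),
                System.currentTimeMillis() - downloadTimeStart);
        return tmpFile;
    } catch (IOException e) {
        throw new ResourceException("Cannot create temporary file", e);
    }
}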

From source file:ro.nextreports.server.web.debug.InfoUtil.java

public static List<Info> getGeneralJVMInfo() {
    List<Info> infos = new ArrayList<Info>();

    RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean();
    infos.add(new Info("uptime", "" + Duration.milliseconds(runtimeBean.getUptime()).toString()));
    infos.add(new Info("name", runtimeBean.getName()));
    infos.add(new Info("pid", runtimeBean.getName().split("@")[0]));

    OperatingSystemMXBean systemBean = ManagementFactory.getOperatingSystemMXBean();
    infos.add(new Info("os name", "" + systemBean.getName()));
    infos.add(new Info("os version", "" + systemBean.getVersion()));
    infos.add(new Info("system load average", "" + systemBean.getSystemLoadAverage()));
    infos.add(new Info("available processors", "" + systemBean.getAvailableProcessors()));

    ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
    infos.add(new Info("thread count", "" + threadBean.getThreadCount()));
    infos.add(new Info("peak thread count", "" + threadBean.getPeakThreadCount()));

    MemoryMXBean memoryBean = ManagementFactory.getMemoryMXBean();
    infos.add(new Info("heap memory used",
            FileUtils.byteCountToDisplaySize(memoryBean.getHeapMemoryUsage().getUsed())));
    infos.add(new Info("non-heap memory used",
            FileUtils.byteCountToDisplaySize(memoryBean.getNonHeapMemoryUsage().getUsed())));

    return infos;
}

From source file:ro.nextreports.server.web.debug.SystemLogPage.java

public SystemLogPage() {
    super();

    FileAppender appender = (FileAppender) LogManager.getRootLogger().getAppender("FILE");
    File logFile = new File(appender.getFile());

    String content;
    try {
        content = FileUtils.readFileToString(logFile);
    } catch (IOException e) {
        e.printStackTrace();
        content = e.toString();
    }

    add(new Label("size", FileUtils.byteCountToDisplaySize(logFile.length())));
    add(new Label("lastModified", new Date(logFile.lastModified()).toString()));
    add(new MultiLineLabel("log", content));
}

From source file:serposcope.lifecycle.DBSizeUtils.java

public String getDbUsageFormatted() {
    long dbUsage = getDbUsage();
    if (dbUsage == -1) {
        return null;
    }
    return FileUtils.byteCountToDisplaySize(dbUsage);
}

From source file:serposcope.lifecycle.DBSizeUtils.java

public String getDiskFreeFormatted() {
    long diskFree = getDiskFree();
    if (diskFree == -1) {
        return null;
    }
    return FileUtils.byteCountToDisplaySize(diskFree);
}

From source file:spade.utility.CommonFunctions.java

public static void closePrintSizeAndDeleteExternalMemoryMap(String id, ExternalMemoryMap<?, ?> map) {
    if (map != null) {
        try {
            map.close();
        } catch (Exception e) {
            logger.log(Level.WARNING, id + ": Failed to close external map", e);
        }
        BigInteger sizeBytes = null;
        try {
            sizeBytes = map.getSizeOfPersistedDataInBytes();
            if (sizeBytes == null) {
                logger.log(Level.INFO, id + ": Failed to get size of external map");
            }
        } catch (Exception e) {
            logger.log(Level.WARNING, id + ": Failed to get size of external map", e);
        }
        if (sizeBytes != null) {
            String displaySize = FileUtils.byteCountToDisplaySize(sizeBytes);
            logger.log(Level.INFO, id + ": Size of the external map on disk: {0}", displaySize);
        }
        try {
            map.delete();
        } catch (Exception e) {
            logger.log(Level.WARNING, id + ": Failed to delete external map", e);
        }
    } else {
        logger.log(Level.WARNING, id + ": NULL external map");
    }
}

From source file:ui.FtpDialog.java

private void uploadFileBtActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_uploadFileBtActionPerformed

    if (connected) {
        JFileChooser uploadFileChooser = new JFileChooser();
        uploadFileChooser.setPreferredSize(new Dimension(650, 450));

        File rootDirectory = new File("C:\\");

        uploadFileChooser
                .setCurrentDirectory(uploadFileChooser.getFileSystemView().getParentDirectory(rootDirectory));

        uploadFileChooser.setDialogTitle("File to upload");
        int result = uploadFileChooser.showOpenDialog(this);
        switch (result) {

        case JFileChooser.APPROVE_OPTION:
            selectedFile = uploadFileChooser.getSelectedFile();

            Trace.trc("File to upload: " + selectedFile.getName() + " File size: "
                    + FileUtils.byteCountToDisplaySize(selectedFile.length()));

            if (connected) {
                if (selectedFile != null) {
                    try {
                        input = new FileInputStream(selectedFile);
                    } catch (IOException e) {
                        e.printStackTrace();
                    }

                    pb = new ProgressBar();
                    pb.execute();

                    uploadFileChooser.setVisible(false);

                } else {
                    JOptionPane.showMessageDialog(rootFrame, "No file to upload has been chosen!", "Error",
                            JOptionPane.ERROR_MESSAGE);
                }
            } else {
                JOptionPane.showMessageDialog(rootFrame, "Not connected to a server, cannot upload file!",
                        "Error", JOptionPane.ERROR_MESSAGE);
            }
            break;

        case JFileChooser.CANCEL_OPTION:
            Trace.trc("Closing file chooser dialog");
            break;

        case JFileChooser.ERROR_OPTION:
            Trace.trc("An error occured");
            break;
        }

    } else {
        JOptionPane.showMessageDialog(rootFrame, "Not connected to a server, cannot upload file!", "Error",
                JOptionPane.ERROR_MESSAGE);
    }

}

From source file:uk.ac.ebi.metabolights.controller.SubmissionQueueController.java

@RequestMapping(value = "/queueExperiment", method = RequestMethod.POST)
public ModelAndView queueExperiment(@RequestParam(required = true, value = "file") MultipartFile file,
        @RequestParam(required = true, value = "pickdate") String publicDate,
        @RequestParam(required = false, value = "study") String study,
        @RequestParam(required = false, value = "owner") String ownerId,
        @RequestParam(required = false, value = "validated", defaultValue = "false") boolean validated,
        HttpServletRequest request) throws Exception {

    //Start the submission process...
    logger.info("Queue Experiment. Start");

    StringBuffer messageBody = new StringBuffer();
    String hostName = java.net.InetAddress.getLocalHost().getHostName();
    messageBody.append("Study submission started from machine " + hostName);

    // Get the actual user (could be a curator).
    MetabolightsUser actualUser = (MetabolightsUser) (SecurityContextHolder.getContext().getAuthentication()
            .getPrincipal());

    // The submitter, by default is the same user
    MetabolightsUser submitter = actualUser;

    // If the user is a curator but there is an owner...
    if (ownerId != null && actualUser.isCurator()) {

        // Overwrite the submitter with the owner...
        submitter = userService.lookupByUserName(ownerId);
    }

    try {

        if (file.isEmpty())
            throw new BIIException(PropertyLookup.getMessage("BIISubmit.fileEmpty"));

        if (publicDate.isEmpty())
            throw new BIIException(PropertyLookup.getMessage("BIISubmit.dateEmpty"));

        if (!file.getOriginalFilename().toLowerCase().endsWith("zip"))
            throw new BIIException(PropertyLookup.getMessage("BIISubmit.fileExtension"));

        if (!validated)
            throw new BIIException(PropertyLookup.getMessage("BIISubmit.notValidated"));

        Date publicDateD;
        SimpleDateFormat sdf = new SimpleDateFormat("dd-MMM-yyyy");
        publicDateD = sdf.parse(publicDate); //Date from the form

        // Extend the message...
        messageBody.append("\nFileName: " + file.getOriginalFilename());

        if (submitter.getUserName().equals(actualUser.getUserName())) {
            messageBody.append("\nUser: " + actualUser.getUserName());
        } else {
            messageBody
                    .append("\nUser: " + actualUser.getUserName() + " on behalf of " + submitter.getUserName());
        }
        if (study == null) {
            messageBody.append("\nNEW STUDY");
        } else {
            messageBody.append("\nSTUDY: " + study);
        }
        messageBody.append("\nPublic Release Date: " + publicDate);

        logger.info("Queueing study");
        SubmissionItem si = new SubmissionItem(file, submitter.getApiToken(), publicDateD, study, false);

        // Submit the item to the queue...
        si.submitToQueue();

        messageBody.append("\n\n File Successfully queued.");

        logger.info("Queued study. Adding data to session");
        HttpSession httpSession = request.getSession();
        httpSession.setAttribute("itemQueued", "msg.studyQueueSuccesfully");

        // Cannot load the queue
        emailService.sendQueuedStudyEmail(submitter.getEmail(), si.getOriginalFileName(),
                FileUtils.byteCountToDisplaySize(si.getFileQueued().length()), si.getPublicReleaseDate(),
                hostName, study);

        return new ModelAndView("redirect:itemQueued");

    } catch (BIIException e) {

        ModelAndView mav = AppContext.getMAVFactory().getFrontierMav("submitError");
        logger.error("Submission exception", e);
        mav.addObject("error", e);
        mav.addObject("studyId", study);
        return mav;

    } catch (Exception e) {

        ModelAndView mav = AppContext.getMAVFactory().getFrontierMav("submitError");
        logger.error("Submission exception", e);
        mav.addObject("error", e);
        // Add the study id...
        mav.addObject("studyId", study);

        messageBody.append("\n\nERROR!!!!!\n\n" + e.getMessage());
        emailService.sendSimpleEmail(
                "queueExperiment FAILED in " + hostName + " by " + actualUser.getUserName(),
                messageBody.toString());

        return mav;

    }
}

From source file:uk.bl.wa.indexer.WARCIndexer.java

/**
 * Extracts metadata from the ArchiveRecord and creates a suitable SolrRecord.
 * Removes the text field if the flag is set.
 *
 * @param archiveName the name of the source archive file
 * @param record the archive record to read
 * @param isTextIncluded whether extracted text should be kept in the record
 * @return the populated SolrRecord, or null if the record was filtered out
 * @throws IOException
 */
public SolrRecord extract(String archiveName, ArchiveRecord record, boolean isTextIncluded) throws IOException {
    final long start = System.nanoTime();
    ArchiveRecordHeader header = record.getHeader();
    SolrRecord solr = solrFactory.createRecord(archiveName, header);

    if (!header.getHeaderFields().isEmpty()) {
        if (header.getHeaderFieldKeys().contains(HEADER_KEY_TYPE)) {
            log.debug("Looking at " + header.getHeaderValue(HEADER_KEY_TYPE));

            if (!checkRecordType((String) header.getHeaderValue(HEADER_KEY_TYPE))) {
                return null;
            }
            // Store WARC record type:
            solr.setField(SolrFields.SOLR_RECORD_TYPE, (String) header.getHeaderValue(HEADER_KEY_TYPE));

            //Store WARC-Record-ID
            solr.setField(SolrFields.WARC_KEY_ID, (String) header.getHeaderValue(HEADER_KEY_ID));
            solr.setField(SolrFields.WARC_IP, (String) header.getHeaderValue(HEADER_KEY_IP));

        } else {
            // else we're processing ARCs so nothing to filter and no
            // revisits
            solr.setField(SolrFields.SOLR_RECORD_TYPE, "arc");
        }

        if (header.getUrl() == null)
            return null;

        // Get the URL:
        String targetUrl = Normalisation.sanitiseWARCHeaderValue(header.getUrl());

        // Strip down very long URLs to avoid
        // "org.apache.commons.httpclient.URIException: Created (escaped)
        // uuri > 2083"
        // Trac #2271: replace string-splitting with URI-based methods.
        if (targetUrl.length() > 2000)
            targetUrl = targetUrl.substring(0, 2000);

        log.debug(
                "Current heap usage: " + FileUtils.byteCountToDisplaySize(Runtime.getRuntime().totalMemory()));
        log.debug("Processing " + targetUrl + " from " + archiveName);

        // Check the filters:
        if (this.checkProtocol(targetUrl) == false)
            return null;
        if (this.checkUrl(targetUrl) == false)
            return null;
        if (this.checkExclusionFilter(targetUrl) == false)
            return null;

        // -----------------------------------------------------
        // Add user supplied Archive-It Solr fields and values:
        // -----------------------------------------------------
        solr.setField(SolrFields.INSTITUTION, WARCIndexerCommand.institution);
        solr.setField(SolrFields.COLLECTION, WARCIndexerCommand.collection);
        solr.setField(SolrFields.COLLECTION_ID, WARCIndexerCommand.collection_id);

        // --- Basic headers ---

        // Basic metadata:
        solr.setField(SolrFields.SOURCE_FILE, archiveName);
        solr.setField(SolrFields.SOURCE_FILE_OFFSET, "" + header.getOffset());
        String filePath = header.getReaderIdentifier();//Full path of file                        

        //Will convert windows path to linux path. Linux paths will not be modified.
        String linuxFilePath = FilenameUtils.separatorsToUnix(filePath);
        solr.setField(SolrFields.SOURCE_FILE_PATH, linuxFilePath);

        byte[] url_md5digest = md5
                .digest(Normalisation.sanitiseWARCHeaderValue(header.getUrl()).getBytes("UTF-8"));
        // String url_base64 =
        // Base64.encodeBase64String(fullUrl.getBytes("UTF-8"));
        String url_md5hex = Base64.encodeBase64String(url_md5digest);
        solr.setField(SolrFields.SOLR_URL, Normalisation.sanitiseWARCHeaderValue(header.getUrl()));
        if (addNormalisedURL) {
            solr.setField(SolrFields.SOLR_URL_NORMALISED, Normalisation.canonicaliseURL(targetUrl));
        }

        // Get the length, but beware, this value also includes the HTTP headers (i.e. it is the payload_length):
        long content_length = header.getLength();

        // Also pull out the file extension, if any:
        String resourceName = parseResourceName(targetUrl);
        solr.addField(SolrFields.RESOURCE_NAME, resourceName);
        solr.addField(SolrFields.CONTENT_TYPE_EXT, parseExtension(resourceName));

        // Add URL-based fields:
        URI saneURI = parseURL(solr, targetUrl);

        // Prepare crawl date information:
        String waybackDate = (header.getDate().replaceAll("[^0-9]", ""));
        Date crawlDate = getWaybackDate(waybackDate);

        // Store the dates:
        solr.setField(SolrFields.CRAWL_DATE, formatter.format(crawlDate));
        solr.setField(SolrFields.CRAWL_YEAR, getYearFromDate(crawlDate));

        // Use the current value as the waybackDate:
        solr.setField(SolrFields.WAYBACK_DATE, waybackDate);

        Instrument.timeRel("WARCIndexer.extract#total", "WARCIndexer.extract#archeaders", start);

        // -----------------------------------------------------
        // Now consume record and HTTP headers (only)
        // -----------------------------------------------------

        InputStream tikainput = null;

        // Only parse HTTP headers for HTTP URIs
        if (targetUrl.startsWith("http")) {
            // Parse HTTP headers:
            String statusCode = null;
            if (record instanceof WARCRecord) {
                statusCode = this.processWARCHeaders(record, header, targetUrl, solr);
                tikainput = record;
            } else if (record instanceof ARCRecord) {
                ARCRecord arcr = (ARCRecord) record;
                statusCode = "" + arcr.getStatusCode();
                this.processHeaders(solr, statusCode, arcr.getHttpHeaders(), targetUrl);
                arcr.skipHttpHeader();
                tikainput = arcr;
            } else {
                log.error("FAIL! Unsupported archive record type.");
                return solr;
            }

            solr.setField(SolrFields.SOLR_STATUS_CODE, statusCode);

            // Skip recording non-content URLs (i.e. 2xx responses only please):
            if (!checkResponseCode(statusCode)) {
                log.debug("Skipping this record based on status code " + statusCode + ": " + targetUrl);
                return null;
            }
        } else {
            log.info("Skipping header parsing as URL does not start with 'http'");
        }

        // -----------------------------------------------------
        // Headers have been processed, payload ready to cache:
        // -----------------------------------------------------

        // Update the content_length based on what's available:
        content_length = tikainput.available();

        // Record the length:
        solr.setField(SolrFields.CONTENT_LENGTH, "" + content_length);

        // Create an appropriately cached version of the payload, to allow analysis.
        final long hashStreamStart = System.nanoTime();
        HashedCachedInputStream hcis = new HashedCachedInputStream(header, tikainput, content_length);
        tikainput = hcis.getInputStream();
        String hash = hcis.getHash();
        Instrument.timeRel("WARCIndexer.extract#total", "WARCIndexer.extract#hashstreamwrap", hashStreamStart);

        // Use an ID that ensures every URL+timestamp gets a separate
        // record:
        String id = waybackDate + "/" + url_md5hex;

        // Set these last:
        solr.setField(SolrFields.ID, id);
        solr.setField(SolrFields.HASH, hash);

        // -----------------------------------------------------
        // Apply any annotations:
        // -----------------------------------------------------
        if (ant != null) {
            try {
                ant.applyAnnotations(saneURI, solr.getSolrDocument());
            } catch (URISyntaxException e) {
                e.printStackTrace();
                log.error("Failed to annotate " + saneURI + " : " + e);
            }
        }

        // -----------------------------------------------------
        // WARC revisit record handling:
        // -----------------------------------------------------

        // If this is a revisit record, we should just return an update to the crawl_dates (when using hashUrlId)
        if (WARCConstants.WARCRecordType.revisit.name()
                .equalsIgnoreCase((String) header.getHeaderValue(HEADER_KEY_TYPE))) {
            solr.removeField(SolrFields.CONTENT_LENGTH); //It is 0 and would mess with statistics                                                                                
            //Copy content_type_served to content_type (no tika/droid for revisits)
            solr.addField(SolrFields.SOLR_CONTENT_TYPE,
                    (String) solr.getFieldValue(SolrFields.CONTENT_TYPE_SERVED));
            return solr;
        }

        // -----------------------------------------------------
        // Payload duplication has been checked, ready to parse:
        // -----------------------------------------------------

        final long analyzeStart = System.nanoTime();

        // Mark the start of the payload, with a readLimit corresponding to
        // the payload size:
        tikainput.mark((int) content_length);

        // Pass on to other extractors as required, resetting the stream before each:
        this.wpa.analyse(archiveName, header, tikainput, solr, content_length);
        Instrument.timeRel("WARCIndexer.extract#total", "WARCIndexer.extract#analyzetikainput", analyzeStart);

        // Clear up the caching of the payload:
        hcis.cleanup();

        // -----------------------------------------------------
        // Payload analysis complete, now performing text analysis:
        // -----------------------------------------------------

        this.txa.analyse(solr);

        // Remove the Text Field if required
        if (!isTextIncluded) {
            solr.removeField(SolrFields.SOLR_EXTRACTED_TEXT);

        } else {
            // Otherwise, decide whether to store or both store and index
            // the text:
            if (storeText == false) {
                // Copy the text into the indexed (but not stored) field:
                solr.setField(SolrFields.SOLR_EXTRACTED_TEXT_NOT_STORED,
                        (String) solr.getField(SolrFields.SOLR_EXTRACTED_TEXT).getFirstValue());
                // Take the text out of the original (stored) field.
                solr.removeField(SolrFields.SOLR_EXTRACTED_TEXT);
            }
        }
    }
    Instrument.timeRel("WARCIndexerCommand.parseWarcFiles#solrdocCreation", "WARCIndexer.extract#total", start);
    String servedType = "" + solr.getField(SolrFields.CONTENT_TYPE_SERVED);
    Instrument.timeRel("WARCIndexer#content_types",
            "WARCIndexer#" + (servedType.contains(";") ? servedType.split(";")[0] : servedType), start);
    Instrument.timeRel("WARCIndexer#content_types", start);
    return solr;
}