Example usage for java.nio.file.attribute BasicFileAttributes size

List of usage examples for java.nio.file.attribute BasicFileAttributes size

Introduction

On this page you can find example usage for java.nio.file.attribute BasicFileAttributes size.

Prototype

long size();

Source Link

Document

Returns the size of the file (in bytes).

Usage

From source file:org.mycore.common.xml.MCRXMLFunctions.java

/**
 * Method returns the amount of space consumed by the files contained in the
 * derivate container. The returned string is already formatted meaning it
 * has already the optimal measurement unit attached (e.g. 142 MB, ).
 *
 * @param derivateId/* w w  w.  j ava  2 s  . com*/
 *            the derivate id for which the size should be returned
 * @return the size as formatted string
 */
public static String getSize(String derivateId) throws IOException {
    MCRPath rootPath = MCRPath.getPath(derivateId, "/");
    final AtomicLong size = new AtomicLong();
    Files.walkFileTree(rootPath, new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            size.addAndGet(attrs.size());
            return super.visitFile(file, attrs);
        }

    });
    return MCRUtils.getSizeFormatted(size.get());
}

From source file:io.uploader.drive.drive.DriveOperations.java

/**
 * Uploads every regular file under {@code srcDir} to Drive, reporting progress
 * through {@code statusReporter} and honouring stop requests between files.
 * Failures on individual files are recorded in {@code operationResult} and do
 * not abort the remaining uploads.
 *
 * @param operationResult collector for the overall completion status and per-file errors
 * @param localPathDriveFileMapping maps each local directory path to its Drive folder;
 *            every file's parent directory must already be present here
 * @param client the Drive client used to perform the uploads
 * @param srcDir the local directory tree whose files are uploaded
 * @param overwrite whether existing remote files should be overwritten
 * @param stopRequester polled between files; a stop request aborts the operation
 * @param statusReporter optional progress/status sink (may be {@code null})
 * @throws IOException if the source tree cannot be listed
 */
private static void uploadFiles(OperationResult operationResult, Map<Path, File> localPathDriveFileMapping,
        Drive client, Path srcDir, boolean overwrite, final StopRequester stopRequester,
        final HasStatusReporter statusReporter) throws IOException {

    Queue<Path> filesQueue = io.uploader.drive.util.FileUtils.getAllFilesPath(srcDir,
            FileFinderOption.FILE_ONLY);

    int count = 0;
    for (Path path : filesQueue) {
        try {
            // Announce which file is about to be transferred, with its size when readable.
            if (statusReporter != null) {
                BasicFileAttributes attr = io.uploader.drive.util.FileUtils.getFileAttr(path);
                StringBuilder sb = new StringBuilder();
                sb.append("Transfering files (");
                sb.append(path.getFileName().toString());
                if (attr != null) {
                    sb.append(" - size: ");
                    sb.append(io.uploader.drive.util.FileUtils.humanReadableByteCount(attr.size(), true));
                }
                sb.append(")");
                statusReporter.setStatus(sb.toString());
            }

            // A stop request aborts the whole batch between files (never mid-file).
            if (hasStopBeenRequested(stopRequester)) {
                if (statusReporter != null) {
                    statusReporter.setStatus("Stopped!");
                }
                operationResult.setStatus(OperationCompletionStatus.STOPPED);
                return;
            }

            // The Drive folder for this file must have been created beforehand.
            final File driveParent = localPathDriveFileMapping.get(path.getParent());
            if (driveParent == null) {
                throw new IllegalStateException(
                        "The path " + path.toString() + " does not have any parent in the drive (parent path "
                                + path.getParent().toString() + ")...");
            }

            // Per-file byte-level progress callback, forwarded to the status reporter.
            InputStreamProgressFilter.StreamProgressCallback progressCallback = null;
            if (statusReporter != null) {
                progressCallback = new InputStreamProgressFilter.StreamProgressCallback() {

                    @Override
                    public void onStreamProgress(double progress) {
                        if (statusReporter != null) {
                            statusReporter.setCurrentProgress(progress);
                        }
                    }
                };
            }
            uploadFile(operationResult, client, driveParent, path, overwrite, progressCallback);

            // Overall progress is the fraction of files processed so far.
            ++count;
            if (statusReporter != null) {
                double p = ((double) count) / filesQueue.size();
                statusReporter.setTotalProgress(p);
                statusReporter.setStatus("Transfering files...");
            }
        } catch (Throwable e) {
            // NOTE(review): deliberately catches Throwable so one failing file does not
            // abort the whole batch; each failure is recorded against its path.
            logger.error("Error occurred while transfering the file " + path.toString(), e);
            operationResult.setStatus(OperationCompletionStatus.ERROR);
            operationResult.addError(path, e);
        }
    }
}

From source file:de.bbe_consulting.mavento.helper.visitor.FileSizeVisitor.java

/**
 * Adds the size of every regular file encountered during the walk to the
 * running total.
 *
 * @param file the file being visited
 * @param attrs the file's basic attributes (provides the size in bytes)
 * @return always {@link FileVisitResult#CONTINUE} so the walk keeps going
 * @throws IOException never thrown here; declared by the visitor contract
 */
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
    // Ignore anything that is not a regular file (directories, links, special files).
    if (!attrs.isRegularFile()) {
        return FileVisitResult.CONTINUE;
    }
    sizeTotal.add(attrs.size());
    return FileVisitResult.CONTINUE;
}

From source file:jduagui.Controller.java

/**
 * Computes the total number of bytes stored under {@code startPath} and, when
 * it is a directory, records two statistics for it in the supplied maps: the
 * number of subdirectories (excluding the root itself) and the number of files.
 *
 * @param startPath the file or directory to measure
 * @param dirs out-parameter; receives {@code startPath -> subdirectory count}
 * @param files out-parameter; receives {@code startPath -> file count}
 * @return the accumulated size of all visited files, in bytes
 * @throws IOException if the tree walk fails at its root
 */
public static long getSize(String startPath, Map<String, Long> dirs, Map<String, Long> files)
        throws IOException {
    final AtomicLong size = new AtomicLong(0);
    final AtomicLong subdirs = new AtomicLong(0);
    final AtomicLong fs = new AtomicLong(0);
    Path path = Paths.get(startPath);

    Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) {
            // Counts every directory entered, including the root itself.
            subdirs.incrementAndGet();
            return FileVisitResult.CONTINUE;
        }

        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            fs.incrementAndGet();
            size.addAndGet(attrs.size());
            return FileVisitResult.CONTINUE;
        }

        @Override
        public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
            // Unreadable files are still counted, but contribute no bytes.
            fs.incrementAndGet();
            return FileVisitResult.CONTINUE;
        }
    });
    // Exclude the root directory itself from the subdirectory count, clamping at
    // zero for the case where the root was never entered (startPath is a file).
    if (subdirs.decrementAndGet() == -1) {
        subdirs.incrementAndGet();
    }

    if (Files.isDirectory(path)) {
        dirs.put(startPath, subdirs.get());
        files.put(startPath, fs.get());
    }
    return size.get();
}

From source file:org.mycore.services.zipper.MCRZipServlet.java

/**
 * Writes the given file into the ZIP container as one archive entry, carrying
 * over its last-modified time and size from the supplied attributes.
 *
 * @param file the source file to compress
 * @param attrs the file's basic attributes (timestamp and size)
 * @param container the ZIP output stream receiving the entry
 * @throws IOException if the entry cannot be written
 */
@Override
protected void sendCompressedFile(MCRPath file, BasicFileAttributes attrs, ZipArchiveOutputStream container)
        throws IOException {
    final ZipArchiveEntry zipEntry = new ZipArchiveEntry(getFilename(file));
    zipEntry.setTime(attrs.lastModifiedTime().toMillis());
    zipEntry.setSize(attrs.size());
    container.putArchiveEntry(zipEntry);
    try {
        Files.copy(file, container);
    } finally {
        // Always close the entry, even when copying the file content fails.
        container.closeArchiveEntry();
    }
}

From source file:com.streamsets.pipeline.lib.io.LiveFile.java

/**
 * Creates a <code>LiveFile</code> given a {@link Path}.
 *
 * @param path the Path of the LiveFile. The file referred by the Path must exist.
 * @throws IOException thrown if the LiveFile does not exist.
 *//*from  w  w  w . j a v  a  2s.  c  om*/
public LiveFile(Path path) throws IOException {
    Utils.checkNotNull(path, "path");
    this.path = path.toAbsolutePath();
    if (!Files.isRegularFile(this.path)) {
        throw new NoSuchFileException(Utils.format("Path '{}' is not a file", this.path));
    }
    BasicFileAttributes attrs = Files.readAttributes(path, BasicFileAttributes.class);
    headLen = (int) Math.min(HEAD_LEN, attrs.size());
    headHash = computeHash(path, headLen);
    iNode = attrs.fileKey().toString();
}

From source file:com.streamsets.pipeline.lib.io.LiveFile.java

/**
 * Refreshes the <code>LiveFile</code>; if the file was renamed, the returned
 * instance carries the new path.
 *
 * @return the refreshed file if the file has been renamed, itself if the file has not been
 * renamed, or {@code null} if the file no longer exists in its directory.
 * @throws IOException thrown if the LiveFile could not be refreshed
 */
public LiveFile refresh() throws IOException {
    LiveFile refresh = this;
    boolean changed;
    try {
        // The file counts as changed when its file key or the hash of its head differs
        // from what was captured at construction time.
        BasicFileAttributes attrs = Files.readAttributes(path, BasicFileAttributes.class);
        String iNodeCurrent = attrs.fileKey().toString();
        int headLenCurrent = (int) Math.min(headLen, attrs.size());
        String headHashCurrent = computeHash(path, headLenCurrent);
        changed = !this.iNode.equals(iNodeCurrent) || !this.headHash.equals(headHashCurrent);
    } catch (NoSuchFileException ex) {
        // The file vanished under its old name; fall through to the directory scan.
        changed = true;
    }
    if (changed) {
        // Scan sibling entries for one with the same file key and head hash — a rename.
        try (DirectoryStream<Path> directoryStream = Files.newDirectoryStream(path.getParent())) {
            for (Path path : directoryStream) { // NOTE: loop variable shadows the field 'path'
                if (path.toFile().isDirectory()) {
                    continue;
                }
                BasicFileAttributes attrs = Files.readAttributes(path, BasicFileAttributes.class);
                String iNode = attrs.fileKey().toString(); // shadows the field 'iNode'
                int headLen = (int) Math.min(this.headLen, attrs.size()); // shadows the field 'headLen'
                String headHash = computeHash(path, headLen);
                if (iNode.equals(this.iNode) && headHash.equals(this.headHash)) {
                    if (headLen == 0) {
                        // The file was empty when first fingerprinted; re-hash now that
                        // it may have content, up to HEAD_LEN bytes.
                        headLen = (int) Math.min(HEAD_LEN, attrs.size());
                        headHash = computeHash(path, headLen);
                    }
                    // Same file key and head hash: this is our file, possibly renamed.
                    return new LiveFile(path, iNode, headHash, headLen);
                } // no match for this sibling; keep scanning
            }
        }
        // No sibling matched: the file no longer exists in the directory.
        return null;
    } // unchanged: return this instance itself
    return refresh;
}

From source file:io.uploader.drive.drive.largefile.GDriveUpload.java

/**
 * Uploads the content of {@code filename} to Drive in fixed-size chunks,
 * resuming from the last byte position the server acknowledged.
 * <p>
 * Retries up to 5 consecutive failures; a 308 (resume incomplete) resets the
 * retry budget, 5xx responses trigger exponential backoff, and 401 refreshes
 * the access token before retrying.
 *
 * @param upload the resumable upload session (tracks the current byte position)
 * @param attr attributes of the local file (used for its total size)
 * @return the Drive file id of the uploaded file
 * @throws IOException if the local file cannot be read or the md5 check fails
 * @throws TransferException (non-resumable) on md5 mismatch during transfer,
 *         a 404 from the server, or an invalid byte position
 */
private String uploadFile(DriveResumableUpload upload, BasicFileAttributes attr) throws IOException {

    long currentBytePosition = upload.getCurrentByte();
    File file = new File(filename);
    if (currentBytePosition > -1 && currentBytePosition < attr.size()) {
        int retries = 0;
        while (retries < 5) {
            byte[] chunk = new byte[chunkSize];
            int bytesRead;
            // try-with-resources guarantees the stream is closed even if skip/read throws
            // (the original leaked the stream on exception).
            try (InputStream stream = io.uploader.drive.util.FileUtils
                    .getInputStreamWithProgressFilter(progressCallback, attr.size(), new FileInputStream(file))) {
                // skip() may skip fewer bytes than requested; loop until the resume
                // position is reached so the chunk is read from the right offset.
                long remaining = currentBytePosition;
                while (remaining > 0) {
                    long skipped = stream.skip(remaining);
                    if (skipped <= 0) {
                        throw new IOException(
                                "Unable to seek to byte " + currentBytePosition + " of " + filename);
                    }
                    remaining -= skipped;
                }
                bytesRead = stream.read(chunk, 0, chunkSize);
            }
            if (bytesRead > 0) {
                int status = upload.uploadChunk(chunk, currentBytePosition, bytesRead);
                if (status == 308) {
                    // 308 RESUME INCOMPLETE is normal progress: reset the retry budget.
                    retries = 0;
                } else if (status >= 500 && status < 600) {
                    // Server-side error: exponential backoff before the next attempt.
                    try {
                        long seconds = Math.round(Math.pow(2, retries + 1));
                        logger.info("Exponential backoff. Waiting " + seconds + " seconds.");
                        Thread.sleep(seconds * 1000);
                    } catch (InterruptedException ex) {
                        Thread.currentThread().interrupt();
                    }
                } else if (status == 401) {
                    logger.info("Token has expired, needs to be refreshed...");
                    upload.updateAccessToken();
                } else if (status == 200 || status == 201) {
                    // Upload finished: verify integrity before reporting success.
                    boolean success = upload.checkMD5(md5);
                    logger.info("local md5sum: " + md5);
                    logger.info("File upload complete.");
                    if (!success) {
                        throw new TransferException(false, "The md5 values do not match");
                    }
                    break;
                } else if (status == 404) {
                    // This can be due to a remaining temporary file with an out-dated link;
                    // thrown with no recovery option (not resumable) so that file gets
                    // deleted (if any).
                    throw new TransferException(false, "The file cannot be found");
                } else {
                    logger.info("Status: " + status);
                }
            }
            ++retries;
            currentBytePosition = upload.getCurrentByte();
        }
    } else if (currentBytePosition == attr.size()) {
        // Nothing left to send: the whole file is already on the server; just verify.
        boolean success = upload.checkMD5(md5);
        logger.info("local md5sum: " + md5);
        logger.info("File upload complete.");

        if (!success) {
            throw new IOException("The md5 values do not match");
        }
    } else {
        // getCurrentByte() returned an invalid position (e.g. -1): session is broken.
        throw new TransferException(false, "Some anomalies have been observed");
    }
    // get file id
    return upload.getFileId();
}

From source file:functionaltests.job.log.TestJobServerLogs.java

/**
 * Prints a diagnostic dump before the test fails: for every file under
 * {@code logsLocation}, its attributes (size, creation and modification time),
 * its first {@code LIMIT} lines and its last {@code LIMIT} lines, with a
 * skip marker when the file is longer than {@code 2 * LIMIT} lines.
 */
private void printDiagnosticMessage() {
    int LIMIT = 5;
    System.out.println("This test is going to fail, but before we print diagnostic message."
            + simpleDateFormat.format(new Date()));
    // iterate over all files in the 'logsLocation'
    for (File file : FileUtils.listFiles(new File(logsLocation), TrueFileFilter.INSTANCE,
            TrueFileFilter.INSTANCE)) {
        // try-with-resources: the reader is now closed on every path
        // (the original leaked one BufferedReader per file).
        try (BufferedReader br = new BufferedReader(new FileReader(file))) {
            BasicFileAttributes attr = Files.readAttributes(file.toPath(), BasicFileAttributes.class);
            System.out.println(String.format("Name: %s, Size: %d, Created: %s, Modified: %s",
                    file.getAbsolutePath(), attr.size(), attr.creationTime(), attr.lastModifiedTime()));
            String line;
            int i;
            // print up to LIMIT first lines
            for (i = 0; i < LIMIT && (line = br.readLine()) != null; ++i) {
                System.out.println(line);
            }

            // keep only the last LIMIT lines while draining the rest of the file
            Queue<String> queue = new CircularFifoQueue<>(LIMIT);
            for (; (line = br.readLine()) != null; ++i) {
                queue.add(line);
            }

            if (i >= LIMIT * 2) { // if there are more lines than 2*LIMIT
                System.out.println(".......");
                System.out.println("....... (skipped content)");
                System.out.println(".......");
            }
            for (String l : queue) { // print the tail of the file
                System.out.println(l);
            }

            System.out.println("------------------------------------");
            System.out.println();
        } catch (IOException e) {
            System.out.println("Exception occurred during accessing file attributes " + e);
        }
    }
}

From source file:com.arpnetworking.metrics.generator.util.TestFileGenerator.java

/**
 * Generates the test metrics file: deletes any previous output, schedules the
 * configured units of work plus a one-shot canary, runs them over the
 * configured time interval, and logs the size of the resulting file.
 */
public void generate() {
    // Start from a clean slate; a stale file would corrupt the generated data.
    try {
        Files.deleteIfExists(_fileName);
    } catch (final IOException e) {
        throw Throwables.propagate(e);
    }

    final long expectedSamples = ((long) _uowCount) * _namesCount * _samplesCount;
    LOGGER.info().setEvent("GeneratingFile").setMessage("Starting file generation")
            .addData("file", _fileName.toAbsolutePath()).addData("expectedSamples", expectedSamples).log();

    final Duration duration = new Duration(_startTime, _endTime);

    // One Gaussian metric generator per metric name, each emitting a fixed sample count.
    final List<MetricGenerator> generators = Lists.newArrayList();
    for (int i = 0; i < _namesCount; ++i) {
        generators.add(new ConstantCountMetricGenerator(_samplesCount,
                new GaussianMetricGenerator(50d, 8d, new SingleNameGenerator(_random))));
    }
    final UnitOfWorkGenerator uowGenerator = new UnitOfWorkGenerator(generators);

    // Spread the units of work evenly across the interval.
    final long durationInNanos = TimeUnit.NANOSECONDS.convert(duration.getMillis(), TimeUnit.MILLISECONDS);
    final long periodInNanos = durationInNanos / _uowCount;
    final List<UnitOfWorkSchedule> schedules = Lists.newArrayList();
    schedules.add(new UnitOfWorkSchedule(uowGenerator, new ConstantTimeScheduler(periodInNanos)));

    // Special canary unit-of-work scheduler: its period exceeds the whole interval,
    // so it is guaranteed to be executed exactly once.
    final MetricGenerator canary = new ConstantMetricGenerator(5, new SpecifiedName(CANARY));
    final UnitOfWorkGenerator canaryUOW = new UnitOfWorkGenerator(Collections.singletonList(canary));
    schedules
            .add(new UnitOfWorkSchedule(canaryUOW, new ConstantTimeScheduler(durationInNanos + periodInNanos)));

    final IntervalExecutor executor = new IntervalExecutor(_startTime, _endTime, schedules, _fileName,
            _clusterName, _serviceName);
    executor.execute();

    // Best effort: report the generated file's size; failure to stat is only a warning.
    try {
        final BasicFileAttributes attributes = Files.readAttributes(_fileName, BasicFileAttributes.class);
        LOGGER.info().setEvent("GenerationComplete").setMessage("Generation completed successfully")
                .addData("size", attributes.size()).log();
    } catch (final IOException e) {
        LOGGER.warn().setEvent("GenerationComplete")
                .setMessage("Generation completed successfully but unable to read attributes of generated file")
                .setThrowable(e).log();
    }
}