Example usage for org.apache.commons.compress.archivers ArchiveEntry getName

Introduction

This page collects example usages of org.apache.commons.compress.archivers.ArchiveEntry.getName().

Prototype

public String getName();

Document

Returns the name of the entry in the archive.
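
Before the project-specific examples below, here is a minimal, self-contained sketch of the call; the file name "archive.zip" and the class name ListEntryNames are placeholders, not taken from the listings that follow. It opens an archive with ArchiveStreamFactory, which auto-detects the format, and prints the name of every entry via getName().

import java.io.BufferedInputStream;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;

public class ListEntryNames {

    public static void main(String[] args) throws Exception {
        // The factory needs a stream that supports mark/reset, hence BufferedInputStream.
        try (InputStream is = new BufferedInputStream(Files.newInputStream(Paths.get("archive.zip")));
                ArchiveInputStream in = new ArchiveStreamFactory().createArchiveInputStream(is)) {
            ArchiveEntry entry;
            while ((entry = in.getNextEntry()) != null) {
                // getName() returns the entry's path inside the archive
                System.out.println(entry.getName());
            }
        }
    }
}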

Usage

From source file:org.structr.websocket.command.UnarchiveCommand.java

private void unarchive(final SecurityContext securityContext, final File file)
        throws ArchiveException, IOException, FrameworkException {

    final App app = StructrApp.getInstance(securityContext);
    final InputStream is;

    try (final Tx tx = app.tx()) {

        final String fileName = file.getName();

        logger.log(Level.INFO, "Unarchiving file {0}", fileName);

        is = file.getInputStream();
        tx.success();

        if (is == null) {

            getWebSocket().send(MessageBuilder.status().code(400)
                    .message("Could not get input stream from file ".concat(fileName)).build(), true);
            return;
        }
    }

    final ArchiveInputStream in = new ArchiveStreamFactory()
            .createArchiveInputStream(new BufferedInputStream(is));
    ArchiveEntry entry = in.getNextEntry();
    int overallCount = 0;

    while (entry != null) {

        try (final Tx tx = app.tx()) {

            int count = 0;

            while (entry != null && count++ < 50) {

                final String entryPath = "/" + PathHelper.clean(entry.getName());
                logger.log(Level.INFO, "Entry path: {0}", entryPath);

                final AbstractFile f = FileHelper.getFileByAbsolutePath(securityContext, entryPath);
                if (f == null) {

                    final Folder parentFolder = createOrGetParentFolder(securityContext, entryPath);
                    final String name = PathHelper.getName(entry.getName());

                    if (StringUtils.isNotEmpty(name) && (parentFolder == null
                            || !(FileHelper.getFolderPath(parentFolder).equals(entryPath)))) {

                        AbstractFile fileOrFolder = null;

                        if (entry.isDirectory()) {

                            fileOrFolder = app.create(Folder.class, name);

                        } else {

                            fileOrFolder = ImageHelper.isImageType(name)
                                    ? ImageHelper.createImage(securityContext, in, null, Image.class, name,
                                            false)
                                    : FileHelper.createFile(securityContext, in, null, File.class, name);
                        }

                        if (parentFolder != null) {
                            fileOrFolder.setProperty(AbstractFile.parent, parentFolder);
                        }

                        logger.log(Level.INFO, "Created {0} {1} with path {2}", new Object[] {
                                fileOrFolder.getType(), fileOrFolder, FileHelper.getFolderPath(fileOrFolder) });

                        // create thumbnails while importing data
                        if (fileOrFolder instanceof Image) {
                            fileOrFolder.getProperty(Image.tnMid);
                            fileOrFolder.getProperty(Image.tnSmall);
                        }

                    }
                }

                entry = in.getNextEntry();

                overallCount++;
            }

            logger.log(Level.INFO, "Committing transaction after {0} files..", overallCount);

            tx.success();
        }

    }

    in.close();

}

From source file:org.thingsboard.demo.loader.data.DemoData.java

private void readFromArchiveFile(String demoDataArchive, String email) throws Exception {
    InputStream inputStream = new BufferedInputStream(Files.newInputStream(Paths.get(demoDataArchive)));
    CompressorInputStream input = new CompressorStreamFactory().createCompressorInputStream(inputStream);
    ArchiveInputStream archInput = new ArchiveStreamFactory()
            .createArchiveInputStream(new BufferedInputStream(input));
    ArchiveEntry entry;

    byte[] pluginsContent = null;
    byte[] rulesContent = null;
    byte[] customersContent = null;
    byte[] devicesContent = null;
    List<byte[]> dashboardsContent = new ArrayList<>();
    while ((entry = archInput.getNextEntry()) != null) {
        if (!entry.isDirectory()) {
            String name = entry.getName();
            if (name.equals(DEMO_PLUGINS_JSON)) {
                pluginsContent = IOUtils.toByteArray(archInput);
            } else if (name.equals(DEMO_RULES_JSON)) {
                rulesContent = IOUtils.toByteArray(archInput);
            } else if (name.equals(DEMO_CUSTOMERS_JSON)) {
                customersContent = IOUtils.toByteArray(archInput);
            } else if (name.equals(DEMO_DEVICES_JSON)) {
                devicesContent = IOUtils.toByteArray(archInput);
            } else if (name.startsWith(DASHBOARDS_DIR + "/")) {
                byte[] dashboardContent = IOUtils.toByteArray(archInput);
                dashboardsContent.add(dashboardContent);
            }
        }
    }
    readData(email, pluginsContent, rulesContent, customersContent, devicesContent, dashboardsContent);
}

From source file:org.trustedanalytics.servicebroker.gearpump.service.file.ArchiverService.java

private void unpack(ArchiveInputStream inputStream) throws IOException {
    ArchiveEntry entry;
    while ((entry = inputStream.getNextEntry()) != null) {
        LOGGER.info("Extracting: {}", entry.getName());
        fileWriter.intoDestination(destinationDir + entry.getName()).withOverride(shouldOverrideFiles)
                .writeToFile(inputStream, entry.isDirectory());
    }
}

From source file:org.ut.biolab.medsavant.shared.util.IOUtils.java

private static List<File> unArchive(File dest, ArchiveInputStream ais) throws IOException {
    List<File> files = new ArrayList<File>();
    ArchiveEntry archiveEntry = ais.getNextEntry();
    while (archiveEntry != null) {
        // create a file with the same name as the archive entry
        File destPath = new File(dest, archiveEntry.getName());

        if (archiveEntry.isDirectory()) {
            destPath.mkdirs();
        } else {
            destPath.createNewFile();
            byte[] btoRead = new byte[1024];

            BufferedOutputStream bout = new BufferedOutputStream(new FileOutputStream(destPath));

            int len = 0;
            while ((len = ais.read(btoRead)) != -1) {
                bout.write(btoRead, 0, len);
            }

            bout.close();
            btoRead = null;
            files.add(destPath);
        }

        archiveEntry = ais.getNextEntry();
    }
    ais.close();
    return files;
}

From source file:org.vafer.jdeb.ArchiveWalker.java

public static void walk(ArchiveInputStream in, ArchiveVisitor visitor) throws IOException {
    try {
        ArchiveEntry entry;
        while ((entry = in.getNextEntry()) != null) {
            byte[] content = new byte[(int) entry.getSize()];
            if (entry.getSize() > 0) {
                int length = in.read(content);
                if (length != entry.getSize()) {
                    throw new IOException("Couldn't read entry " + entry.getName() + " : read " + length
                            + ", expected " + entry.getSize());
                }
            }

            visitor.visit(entry, content);
        }
    } finally {
        in.close();
    }
}

From source file:org.vafer.jdeb.producers.DataProducerArchive.java

public void produce(final DataConsumer pReceiver) throws IOException {

    InputStream is = new BufferedInputStream(new FileInputStream(archive));

    CompressorInputStream compressorInputStream = null;

    try {
        compressorInputStream = new CompressorStreamFactory().createCompressorInputStream(is);
    } catch (CompressorException e) {
        // expected if the input file is a zip archive
    }

    if (compressorInputStream != null) {
        is = new BufferedInputStream(compressorInputStream);
    }

    ArchiveInputStream archiveInputStream = null;

    try {
        archiveInputStream = new ArchiveStreamFactory().createArchiveInputStream(is);
    } catch (ArchiveException e) {
        throw new IOException("Unsupported archive format: " + archive, e);
    }

    EntryConverter converter = null;

    if (archiveInputStream instanceof TarArchiveInputStream) {

        converter = new EntryConverter() {
            public TarArchiveEntry convert(ArchiveEntry entry) {
                TarArchiveEntry src = (TarArchiveEntry) entry;
                TarArchiveEntry dst = new TarArchiveEntry(src.getName(), true);

                dst.setSize(src.getSize());
                dst.setGroupName(src.getGroupName());
                dst.setGroupId(src.getGroupId());
                dst.setUserId(src.getUserId());
                dst.setMode(src.getMode());
                dst.setModTime(src.getModTime());

                return dst;
            }
        };

    } else if (archiveInputStream instanceof ZipArchiveInputStream) {

        converter = new EntryConverter() {
            public TarArchiveEntry convert(ArchiveEntry entry) {
                ZipArchiveEntry src = (ZipArchiveEntry) entry;
                TarArchiveEntry dst = new TarArchiveEntry(src.getName(), true);

                dst.setSize(src.getSize());
                dst.setMode(src.getUnixMode());
                dst.setModTime(src.getTime());

                return dst;
            }
        };

    } else {
        throw new IOException("Unsupported archive format: " + archive);
    }

    try {
        while (true) {

            ArchiveEntry archiveEntry = archiveInputStream.getNextEntry();

            if (archiveEntry == null) {
                break;
            }

            if (!isIncluded(archiveEntry.getName())) {
                continue;
            }

            TarArchiveEntry entry = converter.convert(archiveEntry);

            entry = map(entry);

            if (entry.isDirectory()) {
                pReceiver.onEachDir(entry.getName(), entry.getLinkName(), entry.getUserName(),
                        entry.getUserId(), entry.getGroupName(), entry.getGroupId(), entry.getMode(),
                        entry.getSize());
                continue;
            }
            pReceiver.onEachFile(archiveInputStream, entry.getName(), entry.getLinkName(), entry.getUserName(),
                    entry.getUserId(), entry.getGroupName(), entry.getGroupId(), entry.getMode(),
                    entry.getSize());
        }

    } finally {
        if (archiveInputStream != null) {
            archiveInputStream.close();
        }
    }
}

From source file:org.voyanttools.trombone.input.expand.ArchiveExpander.java

/**
 * Get a list of stored document sources from the specified archive stream
 * (that corresponds to the specified parent stored document source).
 *
 * @param archiveInputStream the full archive input stream
 * @param parentStoredDocumentSource the parent stored document source
 * @return a list of stored document sources in this archive
 * @throws IOException thrown when an IO exception occurs during unarchiving
 */
private List<StoredDocumentSource> getExpandedDocumentSources(ArchiveInputStream archiveInputStream,
        StoredDocumentSource parentStoredDocumentSource) throws IOException {

    List<StoredDocumentSource> expandedDocumentSources = new ArrayList<StoredDocumentSource>();

    ArchiveEntry archiveEntry = archiveInputStream.getNextEntry();
    String parentId = parentStoredDocumentSource.getId();
    DocumentMetadata parentMetadata = parentStoredDocumentSource.getMetadata();
    while (archiveEntry != null) {

        if (archiveEntry.isDirectory() == false) {
            final String filename = archiveEntry.getName();
            final File file = new File(filename);

            // skip files flagged as skippable (directory entries are already excluded above)
            if (DocumentFormat.isSkippable(file) == false) {
                DocumentMetadata childMetadata = parentMetadata.asParent(parentStoredDocumentSource.getId(),
                        DocumentMetadata.ParentType.EXPANSION);
                childMetadata.setLocation(file.toString());
                childMetadata.setModified(archiveEntry.getLastModifiedDate().getTime());
                childMetadata.setSource(Source.STREAM);
                childMetadata.setTitle(file.getName().replaceFirst("\\.\\w+$", ""));
                String id = DigestUtils.md5Hex(parentId + filename);
                InputSource inputSource = new InputStreamInputSource(id, childMetadata,
                        new CloseShieldInputStream(archiveInputStream));
                StoredDocumentSource storedDocumentSource = storedDocumentSourceStorage
                        .getStoredDocumentSource(inputSource);
                expandedDocumentSources
                        .addAll(this.expander.getExpandedStoredDocumentSources(storedDocumentSource)); // expand this recursively
            }
        }
        archiveEntry = archiveInputStream.getNextEntry();
    }

    return expandedDocumentSources;
}

From source file:org.whitesource.docker.DockerAgent.java

/**
 * Extract matching files from the tar archive.
 */
public void extractTarArchive(File containerTarFile, File containerTarExtractDir) {
    TarArchiveInputStream tais = null;
    FileInputStream fis = null;
    try {
        fis = new FileInputStream(containerTarFile);
        tais = new TarArchiveInputStream(fis);
        ArchiveEntry entry = tais.getNextEntry();
        while (entry != null) {
            if (!entry.isDirectory()) {
                String entryName = entry.getName();
                String lowerCaseName = entryName.toLowerCase();
                if (lowerCaseName.matches(SOURCE_FILE_PATTERN) || lowerCaseName.matches(BINARY_FILE_PATTERN)
                        || lowerCaseName.matches(ARCHIVE_FILE_PATTERN)) {
                    File file = new File(containerTarExtractDir, entryName);
                    File parent = file.getParentFile();
                    if (!parent.exists()) {
                        parent.mkdirs();
                    }
                    OutputStream out = new FileOutputStream(file);
                    IOUtils.copy(tais, out);
                    out.close();
                }
            }
            entry = tais.getNextTarEntry();
        }
    } catch (FileNotFoundException e) {
        logger.warn("Error extracting files from {}: {}", containerTarFile.getPath(), e.getMessage());
    } catch (IOException e) {
        logger.warn("Error extracting files from {}: {}", containerTarFile.getPath(), e.getMessage());
    } finally {
        IOUtils.closeQuietly(tais);
        IOUtils.closeQuietly(fis);
    }
}

From source file:org.wildfly.plugin.common.Archives.java

/**
 * Uncompresses the archive file to the target directory.
 * <p>
 * Note this is specific to how WildFly is archived. The first directory is assumed to be the base home directory
 * and will be returned.
 * </p>
 *
 * @param archiveFile     the archive to uncompress, can be a {@code .zip} or {@code .tar.gz}
 * @param targetDir       the directory to extract the zip file to
 * @param replaceIfExists if {@code true} replace the existing files if they exist
 *
 * @return the path to the extracted directory
 *
 * @throws java.io.IOException if an I/O error occurs
 */
@SuppressWarnings("WeakerAccess")
public static Path uncompress(final Path archiveFile, final Path targetDir, final boolean replaceIfExists)
        throws IOException {
    final Path archive = getArchive(archiveFile);

    Path firstDir = null;

    try (ArchiveInputStream in = new ArchiveStreamFactory()
            .createArchiveInputStream(new BufferedInputStream(Files.newInputStream(archive)))) {
        ArchiveEntry entry;
        while ((entry = in.getNextEntry()) != null) {
            final Path extractTarget = targetDir.resolve(entry.getName());
            if (!replaceIfExists && Files.exists(extractTarget)) {
                if (entry.isDirectory() && firstDir == null) {
                    firstDir = extractTarget;
                }
                continue;
            }
            if (entry.isDirectory()) {
                final Path dir = Files.createDirectories(extractTarget);
                if (firstDir == null) {
                    firstDir = dir;
                }
            } else {
                Files.createDirectories(extractTarget.getParent());
                Files.copy(in, extractTarget);
            }
        }
        return firstDir == null ? targetDir : firstDir;
    } catch (ArchiveException e) {
        throw new IOException(e);
    }
}

From source file:org.wildfly.plugin.server.Archives.java

/**
 * Unzips the zip file to the target directory.
 *
 * @param zipFile   the zip file to unzip
 * @param targetDir the directory to extract the zip file to
 *
 * @throws java.io.IOException if an I/O error occurs
 */
public static void unzip(final Path zipFile, final Path targetDir) throws IOException {
    final Path archive = getArchive(zipFile);

    try (final ArchiveInputStream in = new ArchiveStreamFactory()
            .createArchiveInputStream(new BufferedInputStream(Files.newInputStream(archive)))) {
        ArchiveEntry entry;
        while ((entry = in.getNextEntry()) != null) {
            final Path extractTarget = targetDir.resolve(entry.getName());
            if (entry.isDirectory()) {
                Files.createDirectories(extractTarget);
            } else {
                Files.createDirectories(extractTarget.getParent());
                Files.copy(in, extractTarget);
            }
        }
    } catch (ArchiveException e) {
        throw new IOException(e);
    }
}