Example usage for org.apache.commons.compress.archivers.tar TarArchiveEntry isDirectory

List of usage examples for org.apache.commons.compress.archivers.tar TarArchiveEntry isDirectory

Introduction

On this page you can find example usages of org.apache.commons.compress.archivers.tar TarArchiveEntry.isDirectory().

Prototype

public boolean isDirectory() 

Document

Return whether or not this entry represents a directory.
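
For orientation, here is a minimal, self-contained sketch of the pattern the examples below share: iterate the entries of a TarArchiveInputStream and use isDirectory() to decide whether to create a directory or write out a regular file. The archive name "example.tar" and the output directory "outputDir" are illustrative placeholders, not taken from any example on this page.

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.compress.utils.IOUtils;

public class TarDirectoryCheckExample {
    public static void main(String[] args) throws IOException {
        File outputDir = new File("outputDir");
        try (TarArchiveInputStream tarIn = new TarArchiveInputStream(
                new BufferedInputStream(new FileInputStream("example.tar")))) {
            TarArchiveEntry entry;
            while ((entry = tarIn.getNextTarEntry()) != null) {
                File target = new File(outputDir, entry.getName());
                if (entry.isDirectory()) {
                    // Directory entry: just make sure the directory exists.
                    if (!target.isDirectory() && !target.mkdirs()) {
                        throw new IOException("Failed to create directory " + target);
                    }
                } else {
                    // File entry: create parent directories, then copy the entry's bytes.
                    File parent = target.getParentFile();
                    if (parent != null && !parent.isDirectory() && !parent.mkdirs()) {
                        throw new IOException("Failed to create directory " + parent);
                    }
                    try (OutputStream out = new FileOutputStream(target)) {
                        IOUtils.copy(tarIn, out);
                    }
                }
            }
        }
    }
}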

Usage

From source file:org.apache.hadoop.fs.tar.TarFileSystem.java

@Override
public FileStatus getFileStatus(Path f) throws IOException {
    FileStatus fstatus = null;
    Path abs = makeAbsolute(f);
    Path baseTar = getBaseTarPath(abs);
    String inFile = getFileInArchive(abs);

    FileStatus underlying = underlyingFS.getFileStatus(baseTar);

    if (inFile == null) {
        // return the status of the tar itself but make it a dir
        fstatus = new FileStatus(underlying.getLen(), true, underlying.getReplication(),
                underlying.getBlockSize(), underlying.getModificationTime(), underlying.getAccessTime(),
                underlying.getPermission(), underlying.getOwner(), underlying.getGroup(), abs);
    }

    else {
        long offset = index.getOffset(inFile);

        FSDataInputStream in = underlyingFS.open(baseTar);
        in.seek(offset - 512);
        TarArchiveEntry entry = readHeaderEntry(in);

        if (!entry.getName().equals(inFile)) {
            LOG.fatal("Index file is corrupt." + "Requested filename is present in index "
                    + "but absent in TAR.");
            throw new IOException("NBU-TAR: FATAL: entry file name " + "does not match requested file name");
        }

        // Construct a FileStatus object 
        fstatus = new FileStatus(entry.getSize(), entry.isDirectory(), (int) underlying.getReplication(),
                underlying.getBlockSize(), entry.getModTime().getTime(), underlying.getAccessTime(),
                new FsPermission((short) entry.getMode()), entry.getUserName(), entry.getGroupName(), abs);
    }
    return fstatus;
}

From source file:org.apache.hadoop.hive.common.CompressionUtils.java

/**
 * Untar an input file into an output file.
 *
 * The output file is created in the output folder, having the same name as the input file, minus
 * the '.tar' extension.
 *
 * @param inputFileName the input .tar file
 * @param outputDirName the output directory.
 * @throws IOException
 * @throws FileNotFoundException
 *
 * @return The {@link List} of {@link File}s with the untarred content.
 * @throws ArchiveException
 */
public static List<File> unTar(final String inputFileName, final String outputDirName, boolean flatten)
        throws FileNotFoundException, IOException, ArchiveException {

    File inputFile = new File(inputFileName);
    File outputDir = new File(outputDirName);

    final List<File> untaredFiles = new LinkedList<File>();
    final InputStream is;

    if (inputFileName.endsWith(".gz")) {
        is = new GzipCompressorInputStream(new FileInputStream(inputFile));
    } else {
        is = new FileInputStream(inputFile);
    }

    final TarArchiveInputStream debInputStream = (TarArchiveInputStream) new ArchiveStreamFactory()
            .createArchiveInputStream("tar", is);
    TarArchiveEntry entry = null;
    while ((entry = (TarArchiveEntry) debInputStream.getNextEntry()) != null) {
        final File outputFile = new File(outputDir, entry.getName());
        if (entry.isDirectory()) {
            if (flatten) {
                // no sub-directories
                continue;
            }
            LOG.debug(String.format("Attempting to write output directory %s.", outputFile.getAbsolutePath()));
            if (!outputFile.exists()) {
                LOG.debug(String.format("Attempting to create output directory %s.",
                        outputFile.getAbsolutePath()));
                if (!outputFile.mkdirs()) {
                    throw new IllegalStateException(
                            String.format("Couldn't create directory %s.", outputFile.getAbsolutePath()));
                }
            }
        } else {
            final OutputStream outputFileStream;
            if (flatten) {
                File flatOutputFile = new File(outputDir, outputFile.getName());
                LOG.debug(String.format("Creating flat output file %s.", flatOutputFile.getAbsolutePath()));
                outputFileStream = new FileOutputStream(flatOutputFile);
            } else if (!outputFile.getParentFile().exists()) {
                LOG.debug(String.format("Attempting to create output directory %s.",
                        outputFile.getParentFile().getAbsoluteFile()));
                if (!outputFile.getParentFile().getAbsoluteFile().mkdirs()) {
                    throw new IllegalStateException(String.format("Couldn't create directory %s.",
                            outputFile.getParentFile().getAbsolutePath()));
                }
                LOG.debug(String.format("Creating output file %s.", outputFile.getAbsolutePath()));
                outputFileStream = new FileOutputStream(outputFile);
            } else {
                outputFileStream = new FileOutputStream(outputFile);
            }
            IOUtils.copy(debInputStream, outputFileStream);
            outputFileStream.close();
        }
        untaredFiles.add(outputFile);
    }
    debInputStream.close();

    return untaredFiles;
}

From source file:org.apache.ignite.testsuites.IgniteHadoopTestSuite.java

/**
 *  Downloads and extracts an Apache product.
 *
 * @param appName Name of application for log messages.
 * @param homeVariable Pointer to home directory of the component.
 * @param downloadPath Relative download path of tar package.
 * @param destName Local directory name to install component.
 * @throws Exception If failed.
 */
private static void download(String appName, String homeVariable, String downloadPath, String destName)
        throws Exception {
    String homeVal = IgniteSystemProperties.getString(homeVariable);

    if (!F.isEmpty(homeVal) && new File(homeVal).isDirectory()) {
        X.println(homeVariable + " is set to: " + homeVal);

        return;
    }

    List<String> urls = F.asList("http://archive.apache.org/dist/", "http://apache-mirror.rbc.ru/pub/apache/",
            "http://www.eu.apache.org/dist/", "http://www.us.apache.org/dist/");

    String tmpPath = System.getProperty("java.io.tmpdir");

    X.println("tmp: " + tmpPath);

    final File install = new File(tmpPath + File.separatorChar + "__hadoop");

    final File home = new File(install, destName);

    X.println("Setting " + homeVariable + " to " + home.getAbsolutePath());

    System.setProperty(homeVariable, home.getAbsolutePath());

    final File successFile = new File(home, "__success");

    if (home.exists()) {
        if (successFile.exists()) {
            X.println(appName + " distribution already exists.");

            return;
        }

        X.println(appName + " distribution is invalid and it will be deleted.");

        if (!U.delete(home))
            throw new IOException("Failed to delete directory: " + home.getAbsolutePath());
    }

    for (String url : urls) {
        if (!(install.exists() || install.mkdirs()))
            throw new IOException("Failed to create directory: " + install.getAbsolutePath());

        URL u = new URL(url + downloadPath);

        X.println("Attempting to download from: " + u);

        try {
            URLConnection c = u.openConnection();

            c.connect();

            try (TarArchiveInputStream in = new TarArchiveInputStream(
                    new GzipCompressorInputStream(new BufferedInputStream(c.getInputStream(), 32 * 1024)))) {

                TarArchiveEntry entry;

                while ((entry = in.getNextTarEntry()) != null) {
                    File dest = new File(install, entry.getName());

                    if (entry.isDirectory()) {
                        if (!dest.mkdirs())
                            throw new IllegalStateException();
                    } else if (entry.isSymbolicLink()) {
                        // Important: in Hadoop installation there are symlinks, we need to create them:
                        Path theLinkItself = Paths.get(install.getAbsolutePath(), entry.getName());

                        Path linkTarget = Paths.get(entry.getLinkName());

                        Files.createSymbolicLink(theLinkItself, linkTarget);
                    } else {
                        File parent = dest.getParentFile();

                        if (!(parent.exists() || parent.mkdirs()))
                            throw new IllegalStateException();

                        X.print(" [" + dest);

                        try (BufferedOutputStream out = new BufferedOutputStream(
                                new FileOutputStream(dest, false), 128 * 1024)) {
                            U.copy(in, out);

                            out.flush();
                        }

                        Files.setPosixFilePermissions(dest.toPath(), modeToPermissionSet(entry.getMode()));

                        X.println("]");
                    }
                }
            }

            if (successFile.createNewFile())
                return;
        } catch (Exception e) {
            e.printStackTrace();

            U.delete(home);
        }
    }

    throw new IllegalStateException("Failed to install " + appName + ".");
}

From source file:org.apache.kylin.common.util.TarGZUtil.java

public static void uncompressTarGZ(File tarFile, File dest) throws IOException {
    dest.mkdir();
    TarArchiveInputStream tarIn = null;

    tarIn = new TarArchiveInputStream(
            new GzipCompressorInputStream(new BufferedInputStream(new FileInputStream(tarFile))));

    TarArchiveEntry tarEntry = tarIn.getNextTarEntry();
    // Create a file with the same name as each tar entry.
    while (tarEntry != null) {
        File destPath = new File(dest, tarEntry.getName());
        System.out.println("working: " + destPath.getCanonicalPath());
        if (tarEntry.isDirectory()) {
            destPath.mkdirs();
        } else {
            destPath.createNewFile();
            //byte [] btoRead = new byte[(int)tarEntry.getSize()];
            byte[] btoRead = new byte[1024];
            //FileInputStream fin 
            //  = new FileInputStream(destPath.getCanonicalPath());
            BufferedOutputStream bout = new BufferedOutputStream(new FileOutputStream(destPath));
            int len = 0;

            while ((len = tarIn.read(btoRead)) != -1) {
                bout.write(btoRead, 0, len);
            }

            bout.close();
            btoRead = null;

        }
        tarEntry = tarIn.getNextTarEntry();
    }
    tarIn.close();
}

From source file:org.apache.nifi.processors.hadoop.TarUnpackerSequenceFileWriter.java

@Override
protected void processInputStream(final InputStream stream, final FlowFile tarArchivedFlowFile,
        final Writer writer) throws IOException {
    try (final TarArchiveInputStream tarIn = new TarArchiveInputStream(new BufferedInputStream(stream))) {
        TarArchiveEntry tarEntry;
        while ((tarEntry = tarIn.getNextTarEntry()) != null) {
            if (tarEntry.isDirectory()) {
                continue;
            }
            final String key = tarEntry.getName();
            final long fileSize = tarEntry.getSize();
            final InputStreamWritable inStreamWritable = new InputStreamWritable(tarIn, (int) fileSize);
            writer.append(new Text(key), inStreamWritable);
            logger.debug("Appending FlowFile {} to Sequence File", new Object[] { key });
        }
    }
}

From source file:org.apache.sqoop.test.utils.CompressionUtils.java

/**
 * Untar given stream (tar.gz archive) to given directory.
 *
 * Directory structure will be preserved.
 *
 * @param inputStream InputStream of tar.gz archive
 * @param targetDirectory Target directory for tarball content
 * @throws IOException
 */
public static void untarStreamToDirectory(InputStream inputStream, String targetDirectory) throws IOException {
    assert inputStream != null;
    assert targetDirectory != null;

    LOG.info("Untaring archive to directory: " + targetDirectory);

    TarArchiveInputStream in = new TarArchiveInputStream(new GzipCompressorInputStream(inputStream));
    TarArchiveEntry entry = null;

    int BUFFER_SIZE = 2048;

    while ((entry = (TarArchiveEntry) in.getNextEntry()) != null) {
        LOG.info("Untaring file: " + entry.getName());

        if (entry.isDirectory()) {
            (new File(HdfsUtils.joinPathFragments(targetDirectory, entry.getName()))).mkdirs();
        } else {
            int count;
            byte data[] = new byte[BUFFER_SIZE];

            FileOutputStream fos = new FileOutputStream(
                    HdfsUtils.joinPathFragments(targetDirectory, entry.getName()));
            BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER_SIZE);
            while ((count = in.read(data, 0, BUFFER_SIZE)) != -1) {
                dest.write(data, 0, count);
            }
            dest.close();
        }
    }
    in.close();
}

From source file:org.apache.storm.utils.ServerUtils.java

private static void unpackEntries(TarArchiveInputStream tis, TarArchiveEntry entry, File outputDir)
        throws IOException {
    if (entry.isDirectory()) {
        File subDir = new File(outputDir, entry.getName());
        if (!subDir.mkdirs() && !subDir.isDirectory()) {
            throw new IOException("Mkdirs failed to create tar internal dir " + outputDir);
        }
        for (TarArchiveEntry e : entry.getDirectoryEntries()) {
            unpackEntries(tis, e, subDir);
        }
        return;
    }
    File outputFile = new File(outputDir, entry.getName());
    if (!outputFile.getParentFile().exists()) {
        if (!outputFile.getParentFile().mkdirs()) {
            throw new IOException("Mkdirs failed to create tar internal dir " + outputDir);
        }
    }
    int count;
    byte data[] = new byte[2048];
    BufferedOutputStream outputStream = new BufferedOutputStream(new FileOutputStream(outputFile));

    while ((count = tis.read(data)) != -1) {
        outputStream.write(data, 0, count);
    }
    outputStream.flush();
    outputStream.close();
}

From source file:org.canova.api.util.ArchiveUtils.java

/**
 * Extracts files to the specified destination
 * @param file the archive file to extract
 * @param dest the destination directory
 * @throws java.io.IOException
 */
public static void unzipFileTo(String file, String dest) throws IOException {
    File target = new File(file);
    if (!target.exists())
        throw new IllegalArgumentException("Archive doesnt exist");
    FileInputStream fin = new FileInputStream(target);
    int BUFFER = 2048;
    byte data[] = new byte[BUFFER];

    if (file.endsWith(".zip")) {
        //getFromOrigin the zip file content
        ZipInputStream zis = new ZipInputStream(fin);
        //getFromOrigin the zipped file list entry
        ZipEntry ze = zis.getNextEntry();

        while (ze != null) {
            String fileName = ze.getName();

            File newFile = new File(dest + File.separator + fileName);

            if (ze.isDirectory()) {
                newFile.mkdirs();
                zis.closeEntry();
                ze = zis.getNextEntry();
                continue;
            }

            log.info("file unzip : " + newFile.getAbsoluteFile());

            //create all non exists folders
            //else you will hit FileNotFoundException for compressed folder

            FileOutputStream fos = new FileOutputStream(newFile);

            int len;
            while ((len = zis.read(data)) > 0) {
                fos.write(data, 0, len);
            }

            fos.flush();
            fos.close();
            zis.closeEntry();
            ze = zis.getNextEntry();
        }

        zis.close();

    }

    else if (file.endsWith(".tar")) {

        BufferedInputStream in = new BufferedInputStream(fin);
        TarArchiveInputStream tarIn = new TarArchiveInputStream(in);

        TarArchiveEntry entry = null;

        /** Read the tar entries using the getNextEntry method **/

        while ((entry = (TarArchiveEntry) tarIn.getNextEntry()) != null) {

            log.info("Extracting: " + entry.getName());

            /** If the entry is a directory, createComplex the directory. **/

            if (entry.isDirectory()) {

                File f = new File(dest + File.separator + entry.getName());
                f.mkdirs();
            }
            /**
             * If the entry is a file,write the decompressed file to the disk
             * and close destination stream.
             **/
            else {
                int count;

                FileOutputStream fos = new FileOutputStream(dest + File.separator + entry.getName());
                BufferedOutputStream destStream = new BufferedOutputStream(fos, BUFFER);
                while ((count = tarIn.read(data, 0, BUFFER)) != -1) {
                    destStream.write(data, 0, count);
                }

                destStream.flush();

                IOUtils.closeQuietly(destStream);
            }
        }

        /** Close the input stream **/

        tarIn.close();
    }

    else if (file.endsWith(".tar.gz") || file.endsWith(".tgz")) {

        BufferedInputStream in = new BufferedInputStream(fin);
        GzipCompressorInputStream gzIn = new GzipCompressorInputStream(in);
        TarArchiveInputStream tarIn = new TarArchiveInputStream(gzIn);

        TarArchiveEntry entry = null;

        /** Read the tar entries using the getNextEntry method **/

        while ((entry = (TarArchiveEntry) tarIn.getNextEntry()) != null) {

            log.info("Extracting: " + entry.getName());

            /** If the entry is a directory, createComplex the directory. **/

            if (entry.isDirectory()) {

                File f = new File(dest + File.separator + entry.getName());
                f.mkdirs();
            }
            /**
             * If the entry is a file,write the decompressed file to the disk
             * and close destination stream.
             **/
            else {
                int count;

                FileOutputStream fos = new FileOutputStream(dest + File.separator + entry.getName());
                BufferedOutputStream destStream = new BufferedOutputStream(fos, BUFFER);
                while ((count = tarIn.read(data, 0, BUFFER)) != -1) {
                    destStream.write(data, 0, count);
                }

                destStream.flush();

                IOUtils.closeQuietly(destStream);
            }
        }

        /** Close the input stream **/

        tarIn.close();
    }

    else if (file.endsWith(".gz")) {
        GZIPInputStream is2 = new GZIPInputStream(fin);
        File extracted = new File(target.getParent(), target.getName().replace(".gz", ""));
        if (extracted.exists())
            extracted.delete();
        extracted.createNewFile();
        OutputStream fos = FileUtils.openOutputStream(extracted);
        IOUtils.copyLarge(is2, fos);
        is2.close();
        fos.flush();
        fos.close();
    }

}

From source file:org.cloudifysource.esc.util.TarGzUtils.java

/**
 * Extract a tar.gz file.
 * 
 * @param source
 *            The file to extract from.
 * @param destination
 *            The destination folder.
 * @throws IOException
 *             An error occurred during the extraction.
 */
public static void extract(final File source, final String destination) throws IOException {

    LOGGER.fine(String.format("Extracting %s to %s", source.getName(), destination));

    if (!FilenameUtils.getExtension(source.getName().toLowerCase()).equals("gz")) {
        throw new IllegalArgumentException("Expecting tar.gz file: " + source.getAbsolutePath());
    }
    if (!new File(destination).isDirectory()) {
        throw new IllegalArgumentException("Destination should be a folder: " + destination);
    }

    /** create a TarArchiveInputStream object. **/
    FileInputStream fin = new FileInputStream(source);
    BufferedInputStream in = new BufferedInputStream(fin);
    GzipCompressorInputStream gzIn = new GzipCompressorInputStream(in);
    TarArchiveInputStream tarIn = new TarArchiveInputStream(gzIn);

    TarArchiveEntry entry = null;

    /** Read the tar entries using the getNextEntry method **/
    while ((entry = (TarArchiveEntry) tarIn.getNextEntry()) != null) {

        LOGGER.finer("Extracting: " + entry.getName());

        /** If the entry is a directory, create the directory. **/
        if (entry.isDirectory()) {

            File f = new File(destination, entry.getName());
            f.mkdirs();
        } else {
            int count;
            byte[] data = new byte[BUFFER];
            FileOutputStream fos = new FileOutputStream(new File(destination, entry.getName()));
            BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER);
            while ((count = tarIn.read(data, 0, BUFFER)) != -1) {
                dest.write(data, 0, count);
            }
            dest.close();
        }
    }

    /** Close the input stream **/
    tarIn.close();
}

From source file:org.cmuchimps.gort.modules.helper.CompressionHelper.java

public static void untar(File f) {
    if (f == null || !f.exists() || !f.canRead()) {
        return;
    } else {
        System.out.println("Untarring file: " + f.getAbsolutePath());
    }

    File parent = f.getParentFile();

    if (parent == null || !parent.exists() || !parent.canWrite()) {
        return;
    }

    FileInputStream fin = null;
    BufferedInputStream in = null;
    TarArchiveInputStream tarIn = null;
    TarArchiveEntry entry = null;

    try {
        fin = new FileInputStream(f);
        in = new BufferedInputStream(fin);
        tarIn = new TarArchiveInputStream(in);

        while ((entry = (TarArchiveEntry) tarIn.getNextEntry()) != null) {
            final File outputFile = new File(parent, entry.getName());
            if (entry.isDirectory()) {
                if (!outputFile.exists()) {
                    outputFile.mkdirs();
                }
            } else {
                final FileOutputStream outputFileStream = new FileOutputStream(outputFile);
                IOUtils.copy(tarIn, outputFileStream);
                outputFileStream.close();
            }

            //System.out.println("Processed: " + outputFile.getAbsolutePath());
        }

    } catch (FileNotFoundException ex) {
        ex.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        if (in != null) {
            try {
                in.close();
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }

        if (fin != null) {
            try {
                fin.close();
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }

        if (tarIn != null) {
            try {
                tarIn.close();
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }
    }
}