List of usage examples for org.apache.commons.compress.archivers.tar TarArchiveOutputStream closeArchiveEntry
public void closeArchiveEntry() throws IOException
From source file:org.apache.camel.processor.aggregate.tarfile.TarAggregationStrategy.java
private static void addEntryToTar(File source, String entryName, byte[] buffer, int length) throws IOException, ArchiveException { File tmpTar = File.createTempFile(source.getName(), null); tmpTar.delete();//from w ww.j a v a 2 s . co m if (!source.renameTo(tmpTar)) { throw new IOException("Cannot create temp file: " + source.getName()); } TarArchiveInputStream tin = (TarArchiveInputStream) new ArchiveStreamFactory() .createArchiveInputStream(ArchiveStreamFactory.TAR, new FileInputStream(tmpTar)); TarArchiveOutputStream tos = new TarArchiveOutputStream(new FileOutputStream(source)); tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX); tos.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX); // copy the existing entries ArchiveEntry nextEntry; while ((nextEntry = tin.getNextEntry()) != null) { tos.putArchiveEntry(nextEntry); IOUtils.copy(tin, tos); tos.closeArchiveEntry(); } // Create new entry TarArchiveEntry entry = new TarArchiveEntry(entryName); entry.setSize(length); tos.putArchiveEntry(entry); tos.write(buffer, 0, length); tos.closeArchiveEntry(); IOHelper.close(tin); IOHelper.close(tos); }
From source file:org.apache.hadoop.hive.common.CompressionUtils.java
/** * Archive all the files in the inputFiles into outputFile * * @param inputFiles//from w w w .ja va 2s . c om * @param outputFile * @throws IOException */ public static void tar(String parentDir, String[] inputFiles, String outputFile) throws IOException { FileOutputStream out = null; try { out = new FileOutputStream(new File(parentDir, outputFile)); TarArchiveOutputStream tOut = new TarArchiveOutputStream( new GzipCompressorOutputStream(new BufferedOutputStream(out))); for (int i = 0; i < inputFiles.length; i++) { File f = new File(parentDir, inputFiles[i]); TarArchiveEntry tarEntry = new TarArchiveEntry(f, f.getName()); tOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU); tOut.putArchiveEntry(tarEntry); FileInputStream input = new FileInputStream(f); try { IOUtils.copy(input, tOut); // copy with 8K buffer, not close } finally { input.close(); } tOut.closeArchiveEntry(); } tOut.close(); // finishes inside } finally { // TarArchiveOutputStream seemed not to close files properly in error situation org.apache.hadoop.io.IOUtils.closeStream(out); } }
From source file:org.apache.hadoop.yarn.util.TestFSDownload.java
static LocalResource createTarFile(FileContext files, Path p, int len, Random r, LocalResourceVisibility vis) throws IOException, URISyntaxException { byte[] bytes = new byte[len]; r.nextBytes(bytes);// w w w. java 2 s . c om File archiveFile = new File(p.toUri().getPath() + ".tar"); archiveFile.createNewFile(); TarArchiveOutputStream out = new TarArchiveOutputStream(new FileOutputStream(archiveFile)); TarArchiveEntry entry = new TarArchiveEntry(p.getName()); entry.setSize(bytes.length); out.putArchiveEntry(entry); out.write(bytes); out.closeArchiveEntry(); out.close(); LocalResource ret = recordFactory.newRecordInstance(LocalResource.class); ret.setResource(URL.fromPath(new Path(p.toString() + ".tar"))); ret.setSize(len); ret.setType(LocalResourceType.ARCHIVE); ret.setVisibility(vis); ret.setTimestamp(files.getFileStatus(new Path(p.toString() + ".tar")).getModificationTime()); return ret; }
From source file:org.apache.hadoop.yarn.util.TestFSDownload.java
/**
 * Creates a gzip-compressed tar archive at {@code p + ".tar.gz"} containing
 * {@code len} random bytes under a single entry named after {@code p}, and
 * returns a LocalResource of type ARCHIVE describing it.
 *
 * @param files file context used to stat the created archive for its timestamp
 * @param p     base path; the archive is written to {@code p.tar.gz}
 * @param len   number of random payload bytes
 * @param r     random source for the payload
 * @param vis   visibility to record on the resource
 * @throws IOException        on write failure
 * @throws URISyntaxException if the resource URL cannot be formed
 */
static LocalResource createTgzFile(FileContext files, Path p, int len, Random r, LocalResourceVisibility vis)
        throws IOException, URISyntaxException {
    byte[] bytes = new byte[len];
    r.nextBytes(bytes);
    File gzipFile = new File(p.toUri().getPath() + ".tar.gz");
    gzipFile.createNewFile();
    // try-with-resources closes tar + gzip + file layers even when an archive
    // op throws (the original leaked them, leaving a truncated gzip stream).
    try (TarArchiveOutputStream out = new TarArchiveOutputStream(
            new GZIPOutputStream(new FileOutputStream(gzipFile)))) {
        TarArchiveEntry entry = new TarArchiveEntry(p.getName());
        entry.setSize(bytes.length);
        out.putArchiveEntry(entry);
        out.write(bytes);
        out.closeArchiveEntry();
    }
    LocalResource ret = recordFactory.newRecordInstance(LocalResource.class);
    ret.setResource(URL.fromPath(new Path(p.toString() + ".tar.gz")));
    ret.setSize(len);
    ret.setType(LocalResourceType.ARCHIVE);
    ret.setVisibility(vis);
    ret.setTimestamp(files.getFileStatus(new Path(p.toString() + ".tar.gz")).getModificationTime());
    return ret;
}
From source file:org.apache.karaf.tooling.ArchiveMojo.java
private void addFileToTarGz(TarArchiveOutputStream tOut, Path f, String base) throws IOException { if (Files.isDirectory(f)) { String entryName = base + f.getFileName().toString() + "/"; TarArchiveEntry tarEntry = new TarArchiveEntry(entryName); tOut.putArchiveEntry(tarEntry);// ww w.ja v a2 s . co m tOut.closeArchiveEntry(); try (DirectoryStream<Path> children = Files.newDirectoryStream(f)) { for (Path child : children) { addFileToTarGz(tOut, child, entryName); } } } else if (useSymLinks && Files.isSymbolicLink(f)) { String entryName = base + f.getFileName().toString(); TarArchiveEntry tarEntry = new TarArchiveEntry(entryName, TarConstants.LF_SYMLINK); tarEntry.setLinkName(Files.readSymbolicLink(f).toString()); tOut.putArchiveEntry(tarEntry); tOut.closeArchiveEntry(); } else { String entryName = base + f.getFileName().toString(); TarArchiveEntry tarEntry = new TarArchiveEntry(entryName); tarEntry.setSize(Files.size(f)); if (entryName.contains("/bin/") || (!usePathPrefix && entryName.startsWith("bin/"))) { if (entryName.endsWith(".bat")) { tarEntry.setMode(0644); } else { tarEntry.setMode(0755); } } tOut.putArchiveEntry(tarEntry); Files.copy(f, tOut); tOut.closeArchiveEntry(); } }
From source file:org.apache.nifi.cluster.flow.impl.DataFlowDaoImpl.java
private void writeTarEntry(final TarArchiveOutputStream tarOut, final String filename, final byte[] bytes) throws IOException { final TarArchiveEntry flowEntry = new TarArchiveEntry(filename); flowEntry.setSize(bytes.length);//from www. j a va 2s .c o m tarOut.putArchiveEntry(flowEntry); tarOut.write(bytes); tarOut.closeArchiveEntry(); }
From source file:org.apache.nifi.util.FlowFilePackagerV1.java
private void writeAttributesEntry(final Map<String, String> attributes, final TarArchiveOutputStream tout) throws IOException { final StringBuilder sb = new StringBuilder(); sb.append(//from w w w .ja v a2 s. c o m "<?xml version=\"1.0\" encoding=\"UTF-8\"?><!DOCTYPE properties\n SYSTEM \"http://java.sun.com/dtd/properties.dtd\">\n"); sb.append("<properties>"); for (final Map.Entry<String, String> entry : attributes.entrySet()) { final String escapedKey = StringEscapeUtils.escapeXml11(entry.getKey()); final String escapedValue = StringEscapeUtils.escapeXml11(entry.getValue()); sb.append("\n <entry key=\"").append(escapedKey).append("\">").append(escapedValue).append("</entry>"); } sb.append("</properties>"); final byte[] metaBytes = sb.toString().getBytes(StandardCharsets.UTF_8); final TarArchiveEntry attribEntry = new TarArchiveEntry(FILENAME_ATTRIBUTES); attribEntry.setMode(tarPermissions); attribEntry.setSize(metaBytes.length); tout.putArchiveEntry(attribEntry); tout.write(metaBytes); tout.closeArchiveEntry(); }
From source file:org.apache.nifi.util.FlowFilePackagerV1.java
private void writeContentEntry(final TarArchiveOutputStream tarOut, final InputStream inStream, final long fileSize) throws IOException { final TarArchiveEntry entry = new TarArchiveEntry(FILENAME_CONTENT); entry.setMode(tarPermissions);/* www. java 2s . c o m*/ entry.setSize(fileSize); tarOut.putArchiveEntry(entry); final byte[] buffer = new byte[512 << 10];//512KB int bytesRead = 0; while ((bytesRead = inStream.read(buffer)) != -1) { //still more data to read if (bytesRead > 0) { tarOut.write(buffer, 0, bytesRead); } } copy(inStream, tarOut); tarOut.closeArchiveEntry(); }
From source file:org.apache.openejb.maven.plugin.BuildTomEEMojo.java
/**
 * Recursively adds {@code f} to the tar.gz stream, naming entries by the file
 * path with {@code prefix} stripped and separators normalized to "/".
 *
 * <p>Shell scripts (per {@code isSh}) are marked executable; directories are
 * written as empty entries and their children recursed into.
 *
 * @param tarGz  open tar stream to append to
 * @param f      file or directory to add
 * @param prefix path prefix to strip from entry names
 * @throws IOException on any read or write failure
 */
private void tarGz(final TarArchiveOutputStream tarGz, final File f, final String prefix) throws IOException {
    final String path = f.getPath().replace(prefix, "").replace(File.separator, "/");
    final TarArchiveEntry entry = new TarArchiveEntry(f, path);
    if (isSh(path)) {
        // Scripts must stay executable after extraction.
        entry.setMode(0755);
    }
    tarGz.putArchiveEntry(entry);

    if (!f.isDirectory()) {
        // Regular file: stream its contents, then seal the entry.
        IO.copy(f, tarGz);
        tarGz.closeArchiveEntry();
        return;
    }

    // Directory: the entry itself carries no data; recurse into children.
    tarGz.closeArchiveEntry();
    final File[] children = f.listFiles();
    if (children != null) {
        for (final File child : children) {
            tarGz(tarGz, child, prefix);
        }
    }
}
From source file:org.apache.reef.runtime.mesos.driver.REEFScheduler.java
private String getReefTarUri(final String jobIdentifier) { try {/* w ww. j a v a 2 s .c o m*/ // Create REEF_TAR final FileOutputStream fileOutputStream = new FileOutputStream(REEF_TAR); final TarArchiveOutputStream tarArchiveOutputStream = new TarArchiveOutputStream( new GZIPOutputStream(fileOutputStream)); final File globalFolder = new File(this.fileNames.getGlobalFolderPath()); final DirectoryStream<Path> directoryStream = Files.newDirectoryStream(globalFolder.toPath()); for (final Path path : directoryStream) { tarArchiveOutputStream.putArchiveEntry( new TarArchiveEntry(path.toFile(), globalFolder + "/" + path.getFileName())); final BufferedInputStream bufferedInputStream = new BufferedInputStream( new FileInputStream(path.toFile())); IOUtils.copy(bufferedInputStream, tarArchiveOutputStream); bufferedInputStream.close(); tarArchiveOutputStream.closeArchiveEntry(); } directoryStream.close(); tarArchiveOutputStream.close(); fileOutputStream.close(); // Upload REEF_TAR to HDFS final FileSystem fileSystem = FileSystem.get(new Configuration()); final org.apache.hadoop.fs.Path src = new org.apache.hadoop.fs.Path(REEF_TAR); final String reefTarUriValue = fileSystem.getUri().toString() + this.jobSubmissionDirectoryPrefix + "/" + jobIdentifier + "/" + REEF_TAR; final org.apache.hadoop.fs.Path dst = new org.apache.hadoop.fs.Path(reefTarUriValue); fileSystem.copyFromLocalFile(src, dst); return reefTarUriValue; } catch (final IOException e) { throw new RuntimeException(e); } }