List of usage examples for org.apache.commons.compress.archivers.tar TarArchiveOutputStream LONGFILE_GNU
int LONGFILE_GNU
To view the source code for org.apache.commons.compress.archivers.tar TarArchiveOutputStream LONGFILE_GNU, click the Source Link below.
From source file:com.st.maven.debian.DebianPackageMojo.java
private void fillControlTar(Config config, ArFileOutputStream output) throws MojoExecutionException { TarArchiveOutputStream tar = null;//from www . j av a 2 s . c o m try { tar = new TarArchiveOutputStream(new GZIPOutputStream(new ArWrapper(output))); tar.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU); TarArchiveEntry rootDir = new TarArchiveEntry("./"); tar.putArchiveEntry(rootDir); tar.closeArchiveEntry(); byte[] controlData = processTemplate(freemarkerConfig, config, "control.ftl"); TarArchiveEntry controlEntry = new TarArchiveEntry("./control"); controlEntry.setSize(controlData.length); tar.putArchiveEntry(controlEntry); tar.write(controlData); tar.closeArchiveEntry(); byte[] preinstBaseData = processTemplate("preinst", freemarkerConfig, config, combine("preinst.ftl", BASE_DIR + File.separator + "preinst", false)); long size = preinstBaseData.length; TarArchiveEntry preinstEntry = new TarArchiveEntry("./preinst"); preinstEntry.setSize(size); preinstEntry.setMode(0755); tar.putArchiveEntry(preinstEntry); tar.write(preinstBaseData); tar.closeArchiveEntry(); byte[] postinstBaseData = processTemplate("postinst", freemarkerConfig, config, combine("postinst.ftl", BASE_DIR + File.separator + "postinst", true)); size = postinstBaseData.length; TarArchiveEntry postinstEntry = new TarArchiveEntry("./postinst"); postinstEntry.setSize(size); postinstEntry.setMode(0755); tar.putArchiveEntry(postinstEntry); tar.write(postinstBaseData); tar.closeArchiveEntry(); byte[] prermBaseData = processTemplate("prerm", freemarkerConfig, config, combine("prerm.ftl", BASE_DIR + File.separator + "prerm", false)); size = prermBaseData.length; TarArchiveEntry prermEntry = new TarArchiveEntry("./prerm"); prermEntry.setSize(size); prermEntry.setMode(0755); tar.putArchiveEntry(prermEntry); tar.write(prermBaseData); tar.closeArchiveEntry(); byte[] postrmBaseData = processTemplate("postrm", freemarkerConfig, config, combine("postrm.ftl", BASE_DIR + File.separator + "postrm", false)); size 
= postrmBaseData.length; TarArchiveEntry postrmEntry = new TarArchiveEntry("./postrm"); postrmEntry.setSize(size); postrmEntry.setMode(0755); tar.putArchiveEntry(postrmEntry); tar.write(postrmBaseData); tar.closeArchiveEntry(); } catch (Exception e) { throw new MojoExecutionException("unable to create control tar", e); } finally { if (tar != null) { try { tar.close(); } catch (IOException e) { getLog().error("unable to finish tar", e); } } } }
From source file:gdt.data.entity.ArchiveHandler.java
/** * Compress the database into the tgz archive file. * @param entigrator entigrator instance * @param locator$ container of arguments in the string form. * @return true if success false otherwise. *///from www .ja v a 2 s .co m public boolean compressDatabaseToTgz(Entigrator entigrator, String locator$) { try { Properties locator = Locator.toProperties(locator$); archiveType$ = locator.getProperty(ARCHIVE_TYPE); archiveFile$ = locator.getProperty(ARCHIVE_FILE); String tgzFile$ = archiveFile$; File tgzFile = new File(tgzFile$); if (!tgzFile.exists()) tgzFile.createNewFile(); // String userHome$=System.getProperty("user.home"); File tarFile = new File(tgzFile$.replace(".tgz", "") + ".tar"); if (!tarFile.exists()) tarFile.createNewFile(); TarArchiveOutputStream aos = (TarArchiveOutputStream) new ArchiveStreamFactory() .createArchiveOutputStream("tar", new FileOutputStream(tarFile)); aos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU); String entihome$ = entigrator.getEntihome(); append(entigrator, entihome$, entihome$, aos); aos.close(); compressGzipFile(tarFile.getPath(), tgzFile.getPath()); tarFile.delete(); return true; } catch (Exception e) { Logger.getLogger(getClass().getName()).severe(e.toString()); return false; } }
From source file:gdt.data.entity.ArchiveHandler.java
/** * Compress the entities into the tar archive file. * @param entigrator entigrator instance * @param locator$ container of arguments in the string form. * @return true if success false otherwise. *//* w w w .j a v a 2 s . co m*/ public boolean compressEntitiesToTar(Entigrator entigrator, String locator$) { try { // System.out.println("ArchiveHandler:compressEntitiesToTar:locator="+locator$); Properties locator = Locator.toProperties(locator$); archiveType$ = locator.getProperty(ARCHIVE_TYPE); archiveFile$ = locator.getProperty(ARCHIVE_FILE); String entityList$ = locator.getProperty(EntityHandler.ENTITY_LIST); String[] sa = Locator.toArray(entityList$); System.out.println("ArchiveHandler:compressEntitiesToTar:sa=" + sa.length); String tarfile$ = archiveFile$; File tarfile = new File(tarfile$); if (!tarfile.exists()) tarfile.createNewFile(); String entityBody$ = null; String entityHome$ = null; TarArchiveOutputStream aos = (TarArchiveOutputStream) new ArchiveStreamFactory() .createArchiveOutputStream("tar", new FileOutputStream(tarfile$)); aos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU); String entihome$ = entigrator.getEntihome(); String entitiesHome$ = entihome$ + "/" + Entigrator.ENTITY_BASE + "/data/"; // System.out.println("ArchiveHandler:append:entities home=" + entitiesHome$); String iconsHome$ = entihome$ + "/" + Entigrator.ICONS + "/"; String icon$; for (String aSa : sa) { entityBody$ = entitiesHome$ + aSa; append(entigrator, entigrator.getEntihome(), entityBody$, aos); entityHome$ = entigrator.ent_getHome(aSa); if (new File(entityHome$).exists()) { append(entigrator, entigrator.getEntihome(), entityHome$, aos); } icon$ = entigrator.indx_getIcon(aSa); if (icon$ != null) append(entigrator, entigrator.getEntihome(), iconsHome$ + icon$, aos); } aos.close(); return true; } catch (Exception e) { LOGGER.severe(e.toString()); return false; } }
From source file:gdt.data.entity.ArchiveHandler.java
/** * Compress the entities into the tgz archive file. * @param entigrator entigrator instance * @param locator$ container of arguments in the string form. * @return true if success false otherwise. *///from w w w. ja v a2 s . co m public boolean compressEntitiesToTgz(Entigrator entigrator, String locator$) { try { Properties locator = Locator.toProperties(locator$); archiveType$ = locator.getProperty(ARCHIVE_TYPE); archiveFile$ = locator.getProperty(ARCHIVE_FILE); String entityList$ = locator.getProperty(EntityHandler.ENTITY_LIST); String[] sa = Locator.toArray(entityList$); String tgzFile$ = archiveFile$; File tgzFile = new File(tgzFile$); if (!tgzFile.exists()) tgzFile.createNewFile(); // String userHome$=System.getProperty("user.home"); File tarFile = new File(tgzFile$.replace(".tgz", "") + ".tar"); if (!tarFile.exists()) tarFile.createNewFile(); String entityBody$ = null; String entityHome$ = null; TarArchiveOutputStream aos = (TarArchiveOutputStream) new ArchiveStreamFactory() .createArchiveOutputStream("tar", new FileOutputStream(tarFile)); aos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU); String entihome$ = entigrator.getEntihome(); String entitiesHome$ = entihome$ + "/" + Entigrator.ENTITY_BASE + "/data/"; String iconsHome$ = entihome$ + "/" + Entigrator.ICONS + "/"; String icon$; for (String aSa : sa) { entityBody$ = entitiesHome$ + aSa; append(entigrator, entigrator.getEntihome(), entityBody$, aos); entityHome$ = entigrator.ent_getHome(aSa); if (new File(entityHome$).exists()) append(entigrator, entigrator.getEntihome(), entityHome$, aos); icon$ = entigrator.indx_getIcon(aSa); if (icon$ != null) append(entigrator, entigrator.getEntihome(), iconsHome$ + icon$, aos); } aos.close(); compressGzipFile(tarFile.getPath(), tgzFile.getPath()); tarFile.delete(); return true; } catch (Exception e) { LOGGER.severe(e.toString()); return false; } }
From source file:bb.io.TarUtil.java
/**
 * Writes each element of pathsToArchive to a new TAR format archive file specified by tarFile.
 * If any element is a directory, the entire contents of its directory tree will be archived (as limited by filter).
 * Paths that would otherwise be archived may be screened out by supplying a non null value for filter.
 * <p>
 * Altho this method does not use {@link DirUtil#getTree DirUtil.getTree},
 * it uses filter to control subdirectory exploration in a similar manner.
 * <p>
 * In general, the path stored in the archive
 * is the path relative to the <i>parent</i> of the relevant element of pathsToArchive.
 * For example, suppose that some element of pathsToArchive corresponds to <code>D:/someDirectory</code>,
 * and suppose that that directory contains the subdirectory and child file <code>D:/someDirectory/anotherDirectory/childFile</code>.
 * Then the paths stored in the archive are <code>anotherDirectory</code> and <code>anotherDirectory/childFile</code> respectively.
 * <p>
 * One complication with the above scheme is paths which are file system roots: they have no parents.
 * Examples include the windows path <code>C:</code> or the unix path <code>/</code>.
 * In cases like these, this method uses an imaginary parent name of the form <code>rootXXX</code> (where XXX is an integer).
 * For example, on a windows machine, if pathsToArchive contains the paths <code>C:</code> and <code>D:</code>,
 * then the contents of <code>C:</code> might be stored in the archive
 * with a path that starts with <code>root1</code>, and the contents of <code>D:</code>
 * may have an archive path that starts with <code>root2</code>.
 * This behavior ensures that the archive preserves the separate origins of the 2 sources,
 * which is necessary so that they do not get mixed when extracted.
 * <p>
 * The TAR archive witten by this method will use GNU TAR rules for the entry headers if long path names are encountered.
 * <i>This means that standard POSIX compliant programs that do not support the GNU TAR extension
 * will be unable to extract the contents.</i>
 * <p>
 * Optional GZIP compression may also be done.
 * Normally, tarFile must be a path which ends in a ".tar" (case insensitive) extension.
 * However, this method will also accept either ".tar.gz" or ".tgz" extensions,
 * in which case it will perform GZIP compression on tarFile as part of archiving.
 * <p>
 * @param tarFile the TAR File that will write the archive data to
 * @param filter a FileFilter that can use to screen out paths from being written to the archive;
 * may be null, which means everything inside pathsToArchive gets archived;
 * if not null, see warnings in {@link DirUtil#getTree DirUtil.getTree} on directory acceptance
 * @param pathsToArchive array of all the paths to archive
 * @throws Exception if any Throwable is caught; the Throwable is stored as the cause, and the message stores the path of tarFile;
 * here are some of the possible causes:
 * <ol>
 * <li>
 * IllegalArgumentException if pathsToArchive == null; pathsToArchive.length == 0;
 * tarFile == null;
 * tarFile already exists and either is not a normal file or is but already has data inside it;
 * tarFile has an invalid extension;
 * any element of pathsToArchive is null, does not exist, cannot be read, is equal to tarFile, its path contains tarFile,
 * or it fails {@link #isTarable isTarable}
 * </li>
 * <li>SecurityException if a security manager exists and its SecurityManager.checkRead method denies read access to some path</li>
 * <li>IOException if an I/O problem occurs</li>
 * </ol>
 */
public static void archive(File tarFile, FileFilter filter, File... pathsToArchive) throws Exception {
    try {
        // --- validate tarFile: must be absent, or an existing empty regular file ---
        Check.arg().notNull(tarFile);
        if (tarFile.exists()) {
            if (!tarFile.isFile())
                throw new IllegalArgumentException(
                        "tarFile = " + tarFile.getPath() + " exists but is not a normal file");
            if (tarFile.length() != 0)
                throw new IllegalArgumentException("tarFile = " + tarFile.getPath()
                        + " already exists and already has data inside it; this method will not overwrite it");
        }
        // --- validate every source path: non-null, existing, readable, and
        // neither equal to nor an ancestor of tarFile (which would self-archive) ---
        Check.arg().notEmpty(pathsToArchive);
        for (int i = 0; i < pathsToArchive.length; i++) {
            Check.arg().notNull(pathsToArchive[i]);
            if (!pathsToArchive[i].exists())
                throw new IllegalArgumentException("pathsToArchive[" + i + "] = "
                        + pathsToArchive[i].getPath() + " is a non-existent path");
            if (!pathsToArchive[i].canRead())
                throw new IllegalArgumentException("pathsToArchive[" + i + "] = "
                        + pathsToArchive[i].getPath() + " cannot be read by this application");
            if (pathsToArchive[i].equals(tarFile))
                throw new IllegalArgumentException("pathsToArchive[" + i + "] = "
                        + pathsToArchive[i].getPath() + " is the same path as tarFile = " + tarFile.getPath());
            if (pathsToArchive[i].isDirectory() && DirUtil.contains(pathsToArchive[i], tarFile))
                throw new IllegalArgumentException("the directory corresponding to pathsToArchive[" + i
                        + "] = " + pathsToArchive[i].getCanonicalPath()
                        + " will contain the path of tarFile = " + tarFile.getCanonicalPath());
        }
        // --- write the archive; the stream is closed in the finally block below ---
        TarArchiveOutputStream taos = null;
        try {
            // getOutputStream decides plain vs. gzipped output from tarFile's extension
            taos = new TarArchiveOutputStream(getOutputStream(tarFile));
            // GNU long-name mode: see the javadoc note about POSIX extractors
            taos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
            for (File path : pathsToArchive) {
                archive(path, new FileParent(path), taos, filter);
            }
        } finally {
            if (giveUserFeedback)
                ConsoleUtil.eraseLine();
            StreamUtil.close(taos);
        }
    } catch (Throwable t) {
        // wrap everything so the caller always sees which tarFile was involved
        throw new Exception("See cause for the underlying Throwable; happened for tarFile = "
                + (tarFile != null ? tarFile.getPath() : "<null>"), t);
    }
}
From source file:lucee.commons.io.compress.CompressUtil.java
public static void compressTar(Resource[] sources, OutputStream target, int mode) throws IOException { if (target instanceof TarArchiveOutputStream) { compressTar("", sources, (TarArchiveOutputStream) target, mode); return;/*w w w . j a va 2 s . co m*/ } TarArchiveOutputStream tos = new TarArchiveOutputStream(target); tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU); try { compressTar("", sources, tos, mode); } finally { IOUtil.closeEL(tos); } }
From source file:lk.score.androphsy.main.NewCase.java
private void compressFiles(ArrayList<File> list, File outFile) throws IOException { FileOutputStream fos = new FileOutputStream(outFile); BufferedOutputStream bos = new BufferedOutputStream(fos); GZIPOutputStream gos = new GZIPOutputStream(bos); TarArchiveOutputStream taos = new TarArchiveOutputStream(gos); taos.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_STAR); taos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU); for (File f : list) { addFileToCompression(taos, f, ""); }/*from ww w.ja v a2 s. c o m*/ taos.close(); fos.close(); // gos.close(); // bos.close(); }
From source file:gov.nih.nci.ncicb.tcga.dcc.dam.processors.FilePackager.java
TarArchiveOutputStream makeTarGzOutputStream(final File archiveFile) throws IOException { final TarArchiveOutputStream tarArchiveOutputStream = new TarArchiveOutputStream( new GZIPOutputStream(new FileOutputStream(archiveFile))); tarArchiveOutputStream.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU); tarArchiveOutputStream.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_STAR); return tarArchiveOutputStream; }
From source file:frameworks.Masken.java
public static void createTarGzip(String dirPath, String tarGzPath) throws IOException { File inputDirectoryPath = new File(dirPath); File outputFile = new File(tarGzPath); try (FileOutputStream fileOutputStream = new FileOutputStream(outputFile); BufferedOutputStream bufferedOutputStream = new BufferedOutputStream(fileOutputStream); GzipCompressorOutputStream gzipOutputStream = new GzipCompressorOutputStream(bufferedOutputStream); TarArchiveOutputStream tarArchiveOutputStream = new TarArchiveOutputStream(gzipOutputStream)) { tarArchiveOutputStream.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX); tarArchiveOutputStream.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU); List<File> files = new ArrayList<>(FileUtils.listFiles(inputDirectoryPath, new RegexFileFilter("^(.*?)"), DirectoryFileFilter.DIRECTORY)); for (int i = 0; i < files.size(); i++) { File currentFile = files.get(i); if (!currentFile.getName().contains(".tgz")) { String relativeFilePath = new File(inputDirectoryPath.toURI()).toURI() .relativize(new File(currentFile.getAbsolutePath()).toURI()).getPath(); TarArchiveEntry tarEntry = new TarArchiveEntry(currentFile, relativeFilePath); tarEntry.setSize(currentFile.length()); tarArchiveOutputStream.putArchiveEntry(tarEntry); FileInputStream in = new FileInputStream(currentFile); //tarArchiveOutputStream.write(IOUtils.toByteArray(new FileInputStream(currentFile))); tarArchiveOutputStream.write(IOUtils.toByteArray(in)); tarArchiveOutputStream.closeArchiveEntry(); in.close();/*from w ww . ja v a 2s . co m*/ } } tarArchiveOutputStream.close(); } }
From source file:org.apache.ant.compress.taskdefs.Tar.java
public Tar() {
    // Stream factory: configures the tar output stream according to the
    // requested format before any entries are written.
    setFactory(new TarStreamFactory() {
        public ArchiveOutputStream getArchiveStream(OutputStream stream, String encoding)
                throws IOException {
            TarArchiveOutputStream o = (TarArchiveOutputStream) super.getArchiveStream(stream, encoding);
            if (format.equals(Format.OLDGNU)) {
                // old GNU: GNU long-name extension only
                o.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
            } else if (format.equals(Format.GNU)) {
                // GNU: long names plus STAR-style big-number headers
                o.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
                o.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_STAR);
            } else if (format.equals(Format.STAR)) {
                // STAR: POSIX long names, STAR big numbers
                o.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
                o.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_STAR);
            } else if (format.equals(Format.PAX)) {
                // PAX: fully POSIX, with pax headers for non-ASCII names
                o.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
                o.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX);
                o.setAddPaxHeadersForNonAsciiNames(true);
            }
            return o;
        }
    });
    // Entry builder: maps a resource (plus its fileset flags) to a tar entry.
    setEntryBuilder(new ArchiveBase.EntryBuilder() {
        public ArchiveEntry buildEntry(ArchiveBase.ResourceWithFlags r) {
            boolean isDir = r.getResource().isDirectory();
            String name = r.getName();
            // normalize: directory names end with "/", plain file names do not
            if (isDir && !name.endsWith("/")) {
                name += "/";
            } else if (!isDir && name.endsWith("/")) {
                name = name.substring(0, name.length() - 1);
            }
            TarArchiveEntry ent = new TarArchiveEntry(name, getPreserveLeadingSlashes());
            // mtime rounded to 1000 ms granularity
            ent.setModTime(round(r.getResource().getLastModified(), 1000));
            ent.setSize(isDir ? 0 : r.getResource().getSize());
            // mode: collection-level (file/dir) setting is checked first, then
            // resource-level, then the ArchiveFileSet defaults.
            if (!isDir && r.getCollectionFlags().hasModeBeenSet()) {
                ent.setMode(r.getCollectionFlags().getMode());
            } else if (isDir && r.getCollectionFlags().hasDirModeBeenSet()) {
                ent.setMode(r.getCollectionFlags().getDirMode());
            } else if (r.getResourceFlags().hasModeBeenSet()) {
                ent.setMode(r.getResourceFlags().getMode());
            } else {
                ent.setMode(isDir ? ArchiveFileSet.DEFAULT_DIR_MODE : ArchiveFileSet.DEFAULT_FILE_MODE);
            }
            // ids/names: resource-level setting wins over collection-level;
            // unset values are left at the entry's defaults.
            if (r.getResourceFlags().hasUserIdBeenSet()) {
                ent.setUserId(r.getResourceFlags().getUserId());
            } else if (r.getCollectionFlags().hasUserIdBeenSet()) {
                ent.setUserId(r.getCollectionFlags().getUserId());
            }
            if (r.getResourceFlags().hasGroupIdBeenSet()) {
                ent.setGroupId(r.getResourceFlags().getGroupId());
            } else if (r.getCollectionFlags().hasGroupIdBeenSet()) {
                ent.setGroupId(r.getCollectionFlags().getGroupId());
            }
            if (r.getResourceFlags().hasUserNameBeenSet()) {
                ent.setUserName(r.getResourceFlags().getUserName());
            } else if (r.getCollectionFlags().hasUserNameBeenSet()) {
                ent.setUserName(r.getCollectionFlags().getUserName());
            }
            if (r.getResourceFlags().hasGroupNameBeenSet()) {
                ent.setGroupName(r.getResourceFlags().getGroupName());
            } else if (r.getCollectionFlags().hasGroupNameBeenSet()) {
                ent.setGroupName(r.getCollectionFlags().getGroupName());
            }
            return ent;
        }
    });
    // Fileset builder: wraps the destination resource in a TarFileSet.
    setFileSetBuilder(new ArchiveBase.FileSetBuilder() {
        public ArchiveFileSet buildFileSet(Resource dest) {
            ArchiveFileSet afs = new TarFileSet();
            afs.setSrcResource(dest);
            return afs;
        }
    });
}