Usage examples for org.apache.commons.compress.archivers.ArchiveEntry.isDirectory()
public boolean isDirectory();
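Before the project-specific examples, here is a minimal self-contained sketch of the common pattern: iterate entries with getNextEntry() and branch on isDirectory() to decide between creating a directory and copying the entry's bytes. The archive and destination paths are placeholders, not taken from any of the projects below.

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;
import org.apache.commons.compress.utils.IOUtils;

public class IsDirectoryExample {
    public static void main(String[] args) throws IOException, ArchiveException {
        File archive = new File("example.zip");   // placeholder archive
        File destination = new File("extracted"); // placeholder output directory

        try (ArchiveInputStream in = new ArchiveStreamFactory()
                .createArchiveInputStream(new BufferedInputStream(new FileInputStream(archive)))) {
            ArchiveEntry entry;
            while ((entry = in.getNextEntry()) != null) {
                File target = new File(destination, entry.getName());
                if (entry.isDirectory()) {
                    // directory entries carry no data; just materialize the directory
                    target.mkdirs();
                } else {
                    target.getParentFile().mkdirs();
                    try (OutputStream out = new FileOutputStream(target)) {
                        IOUtils.copy(in, out); // copies only the current entry's bytes
                    }
                }
            }
        }
    }
}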
From source file:eu.ensure.packproc.ip.PackageProcessor.java
/**
 * Generic entry to the information package-processor.
 * <p>
 * Will route to more specific actions based on the plugin-specific configuration.
 * <p>
 * @param name - name of entity (information package)
 * @param inputStream - input stream onto information package
 * @param outputStream - [optionally] output stream onto (new) information package
 * @param context - a context for this processor
 * @throws IOException - if file I/O fails
 * @throws ArchiveException - if information package has unknown packaging format
 * @throws ProcessorException - if processing of information package fails
 * @throws ClassNotFoundException - if action not found
 */
public void process(String name, InputStream inputStream, OutputStream outputStream, ProcessorContext context)
        throws IOException, ArchiveException, ProcessorException, ClassNotFoundException {

    BasicProcessorContext basicContext = context.push(new BasicProcessorContext(name));
    boolean isMutableCall = null != outputStream;

    ArchiveInputStream archiveInputStream = null;
    PackageOutputStream archiveOutputStream = null;
    try {
        // Package readers and writers
        archiveInputStream = factory.createArchiveInputStream(new BufferedInputStream(inputStream));
        if (isMutableCall) {
            archiveOutputStream = PackageOutputStream.createOutputStreamFrom(archiveInputStream, outputStream);
        }

        // Iterate through objects in the input package
        ArchiveEntry archiveEntry = null;

        with_next_entry:
        while ((archiveEntry = archiveInputStream.getNextEntry()) != null) {
            String entryName = archiveEntry.getName();
            if (archiveEntry.isDirectory()) {
                entryName += "/";
            }

            if (log.isInfoEnabled()) {
                log.info("");
                String info = "### " + entryName;
                long size = archiveEntry.getSize();
                info += " (~" + Number.asHumanApproximate(size) + " or " + size + " bytes)";
                log.info(info);
            }

            // TODO: Triggers for "/" will have to be processed manually here!

            MultiDigestInputStream entryInputStream = null;
            try {
                PackageEntry structureEntry = new PackageEntry(archiveEntry);
                entryInputStream = new MultiDigestInputStream(archiveInputStream); // As it happens to be!

                // Directories are not processed per se
                Iterator<Action> ait = actions.iterator();
                while (ait.hasNext()) {
                    Action action = ait.next();
                    if (action.match(structureEntry.getName())) {
                        if (log.isDebugEnabled()) {
                            log.debug(me() + ":process container");
                        }
                        Processor processor = action.getProcessor();
                        if (processor instanceof ContainerStructureProcessor) {
                            if (action.getMethod().equalsIgnoreCase("process")) {
                                //-----------------------------------------------------------------------------
                                // Since we are referring to a structure (processor), we are probably just
                                // going to process an embedded TAR-file (or the like). We create a
                                // temporary file and recursively feed it to the processor manager...
                                //-----------------------------------------------------------------------------
                                File subInputFile = extractEntry(structureEntry, entryInputStream);
                                File subOutputFile = null;
                                if (isMutableCall) {
                                    subOutputFile = File.createTempFile("temporary-processed", ".package");
                                }
                                try {
                                    InputStream subInputStream = null;
                                    OutputStream subOutputStream = null;
                                    try {
                                        subInputStream = new BufferedInputStream(new FileInputStream(subInputFile));
                                        if (isMutableCall) {
                                            subOutputStream = new BufferedOutputStream(new FileOutputStream(subOutputFile));
                                        }

                                        // Run it through the processor manager which knows what to do with it
                                        manager.applyOnContainerWithStructure(action.getProcessor(), action.getMethod(),
                                                structureEntry.getName(), subInputStream, subOutputStream, basicContext);
                                    } finally {
                                        if (null != subInputStream) subInputStream.close();
                                        if (null != subOutputStream) subOutputStream.close();
                                    }

                                    if (isMutableCall) {
                                        // Add the temporary file to the output stream instead of the original
                                        addEntry(subOutputFile, structureEntry, archiveOutputStream);
                                    }
                                } finally {
                                    if (null != subInputFile && subInputFile.exists()) subInputFile.delete();
                                    if (null != subOutputFile && subOutputFile.exists()) subOutputFile.delete();
                                }
                                continue with_next_entry; // since we operated on a unique entry
                            } else {
                                // Unknown operation on a container file
                                throw new ProcessorException("Unknown action on container: " + action.getMethod());
                            }
                        } else if (processor instanceof FileProcessor) {
                            //---------------------------------------------------------------------------------
                            // Since we are referring to a file processor, we will just pass the entry with its
                            // input stream back to the processor manager that will know what to do with it.
                            //---------------------------------------------------------------------------------
                            manager.applyOnEntry(action.getProcessor(), action.getMethod(), structureEntry,
                                    entryInputStream, archiveOutputStream, basicContext);
                            continue with_next_entry; // since we operated on a unique entry
                        }
                    }
                }

                if (isMutableCall && !addedEntries.contains(structureEntry.getName())) {
                    // We may safely copy file
                    copyEntry(structureEntry, entryInputStream, archiveOutputStream);
                }
            } finally {
                /*
                 * Don't close the entryInputStream! It is just a reference to the archiveInputStream
                 * which we want to continue operating upon.
                 */
                if (!archiveEntry.isDirectory()) {
                    // Collect bitstream information - this is where we associate _actual_ values,
                    // i.e. calculated checksums and calculated byte lengths.
                    Map<String, String> bitstreamInfo = new HashMap<String, String>();

                    // OBSERVE: The following might not be completely valid in all circumstances,
                    // as InputStream.getSize() only returns the number of bytes that you can read
                    // and not necessarily the number of bytes in the stream. But in this case,
                    // I believe it to be valid...
                    if (entryInputStream.getSize() > 0) {
                        bitstreamInfo.put("size", "" + entryInputStream.getSize());

                        Map<String, byte[]> digests = entryInputStream.getDigests();
                        for (String key : digests.keySet()) {
                            byte[] digest = digests.get(key);

                            if (digest.length == 8) {
                                ByteBuffer buf = ByteBuffer.wrap(digest);
                                String value = "" + buf.getLong();
                                bitstreamInfo.put(key, value);
                            } else {
                                StringBuffer hexString = new StringBuffer();
                                for (int i = 0; i < digest.length; i++) {
                                    hexString.append(Integer.toHexString(0xFF & digest[i]));
                                }
                                String value = hexString.toString();
                                bitstreamInfo.put(key, value);
                            }
                        }

                        // Create a package-relative path...
                        File top = new File("/");
                        File contentStream = top; // starting point relative to top

                        // ...and reassemble
                        int start = entryName.startsWith("/") ? 0 : 1; /* skip [example1]/content/... */
                        String[] parts = entryName.split("/");
                        for (int i = start; i < parts.length; i++) {
                            contentStream = new File(contentStream, parts[i]);
                        }
                        bitstreamInfo.put("fileName", parts[parts.length - 1]);

                        String path = contentStream.getPath().replace("\\", "/"); // in case we're on Windows
                        context.associate("CALCULATED", path, path, bitstreamInfo);
                    }
                }
            }
        }
    } finally {
        if (null != archiveOutputStream) archiveOutputStream.close();
        if (null != archiveInputStream) archiveInputStream.close();
        context.pop();
    }
}
From source file:io.fabric8.spi.process.AbstractProcessHandler.java
@Override
public final ManagedProcess create(AgentRegistration agentReg, ProcessOptions options, ProcessIdentity identity) {
    File targetDir = options.getTargetPath().toAbsolutePath().toFile();
    IllegalStateAssertion.assertTrue(targetDir.isDirectory() || targetDir.mkdirs(),
            "Cannot create target dir: " + targetDir);

    File homeDir = null;
    for (MavenCoordinates artefact : options.getMavenCoordinates()) {
        Resource resource = mavenRepository.findMavenResource(artefact);
        IllegalStateAssertion.assertNotNull(resource, "Cannot find maven resource: " + artefact);

        ResourceContent content = resource.adapt(ResourceContent.class);
        IllegalStateAssertion.assertNotNull(content, "Cannot obtain resource content for: " + artefact);

        try {
            ArchiveInputStream ais;
            if ("tar.gz".equals(artefact.getType())) {
                InputStream inputStream = content.getContent();
                ais = new TarArchiveInputStream(new GZIPInputStream(inputStream));
            } else {
                InputStream inputStream = content.getContent();
                ais = new ArchiveStreamFactory().createArchiveInputStream(artefact.getType(), inputStream);
            }

            ArchiveEntry entry = null;
            boolean needContainerHome = homeDir == null;
            while ((entry = ais.getNextEntry()) != null) {
                File targetFile;
                if (needContainerHome) {
                    targetFile = new File(targetDir, entry.getName());
                } else {
                    targetFile = new File(homeDir, entry.getName());
                }
                if (!entry.isDirectory()) {
                    File parentDir = targetFile.getParentFile();
                    IllegalStateAssertion.assertTrue(parentDir.exists() || parentDir.mkdirs(),
                            "Cannot create target directory: " + parentDir);

                    FileOutputStream fos = new FileOutputStream(targetFile);
                    copyStream(ais, fos);
                    fos.close();

                    if (needContainerHome && homeDir == null) {
                        File currentDir = parentDir;
                        while (!currentDir.getParentFile().equals(targetDir)) {
                            currentDir = currentDir.getParentFile();
                        }
                        homeDir = currentDir;
                    }
                }
            }
            ais.close();
        } catch (RuntimeException rte) {
            throw rte;
        } catch (Exception ex) {
            throw new IllegalStateException("Cannot extract artefact: " + artefact, ex);
        }
    }

    managedProcess = new DefaultManagedProcess(identity, options, homeDir.toPath(), State.CREATED);
    managedProcess.addAttribute(ManagedProcess.ATTRIBUTE_KEY_AGENT_REGISTRATION, agentReg);
    managedProcess.addAttribute(ContainerAttributes.ATTRIBUTE_KEY_AGENT_JMX_SERVER_URL, agentReg.getJmxServerUrl());
    managedProcess.addAttribute(ContainerAttributes.ATTRIBUTE_KEY_AGENT_JMX_USERNAME, agentReg.getJmxUsername());
    managedProcess.addAttribute(ContainerAttributes.ATTRIBUTE_KEY_AGENT_JMX_PASSWORD, agentReg.getJmxPassword());

    try {
        doConfigure(managedProcess);
    } catch (Exception ex) {
        throw new LifecycleException("Cannot configure container", ex);
    }
    return new ImmutableManagedProcess(managedProcess);
}
From source file:br.com.thiaguten.archive.AbstractArchive.java
/**
 * Generic decompress implementation
 */
@Override
public Path decompress(Path path) throws IOException {
    Path decompressDir = removeExtension(path);

    logger.debug("reading archive file " + path);

    try (ArchiveInputStream archiveInputStream = createArchiveInputStream(
            new BufferedInputStream(newInputStream(path)))) {

        // creates a new decompress folder to not override if already exists
        // if you do not want this behavior, just comment this line
        decompressDir = createFile(ArchiveAction.DECOMPRESS, decompressDir.getParent(), decompressDir);

        createDirectories(decompressDir);
        logger.debug("creating the decompress destination directory " + decompressDir);

        ArchiveEntry entry;
        while ((entry = archiveInputStream.getNextEntry()) != null) {
            if (archiveInputStream.canReadEntryData(entry)) {
                final String entryName = entry.getName();
                final Path target = Paths.get(decompressDir.toString(), entryName);
                final Path parent = target.getParent();

                if (parent != null && !exists(parent)) {
                    createDirectories(parent);
                }

                logger.debug("reading compressed path " + entryName);

                if (!entry.isDirectory()) {
                    try (OutputStream outputStream = new BufferedOutputStream(newOutputStream(target))) {
                        logger.debug("writing compressed " + entryName + " file in the decompress directory");

                        // byte[] content = new byte[(int) entry.getSize()];
                        // outputStream.write(content);
                        IOUtils.copy(archiveInputStream, outputStream);
                    }
                }
            }
        }
        logger.debug("finishing the decompress in the directory: " + decompressDir);
    }
    return decompressDir;
}
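The commented-out alternative in the example above hints at a pitfall with sizing buffers from entry metadata: ArchiveEntry.getSize() is only meaningful when the format actually records the uncompressed size, and otherwise returns ArchiveEntry.SIZE_UNKNOWN (-1), which is why the streaming IOUtils.copy is the safer choice. A minimal sketch of the check; the helper class name is illustrative and not part of any of these projects.

import org.apache.commons.compress.archivers.ArchiveEntry;

final class EntrySizes {
    // getSize() is only reliable when the format stores the uncompressed size;
    // otherwise it returns ArchiveEntry.SIZE_UNKNOWN (-1), so streaming copies
    // (as in the example above) are safer than preallocating a byte array.
    static boolean hasKnownSize(ArchiveEntry entry) {
        return entry.getSize() != ArchiveEntry.SIZE_UNKNOWN;
    }
}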
From source file:autoupdater.FileDAO.java
public File inflateArchive(File source, File destination) throws IOException, ArchiveException {
    ArchiveInputStream stream = new ArchiveStreamFactory()
            .createArchiveInputStream(new BufferedInputStream(new FileInputStream(source)));
    ArchiveEntry entry = null;
    FileOutputStream dest = null;
    try {
        while ((entry = stream.getNextEntry()) != null) {
            File destFile = new File(destination, entry.getName());
            if (!destFile.getParentFile().exists()) {
                if (!destFile.getParentFile().mkdirs()) {
                    throw new IOException("could not create the folders to unzip in");
                }
            }
            if (!entry.isDirectory()) {
                try {
                    dest = new FileOutputStream(destFile);
                    // Copy only the current entry; the archive stream itself is shared
                    // across entries and must stay open until all entries are read.
                    IOUtils.copyLarge(stream, dest);
                } finally {
                    if (dest != null) {
                        dest.close();
                    }
                }
            } else {
                if (!destFile.exists()) {
                    if (!destFile.mkdirs()) {
                        throw new IOException("could not create folders to unzip file");
                    }
                }
            }
        }
    } finally {
        stream.close();
    }
    return destination;
}
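Several examples in this list resolve the target file directly via new File(destination, entry.getName()). When archives can come from untrusted sources, a canonical-path check guards against entry names that climb out of the destination with ".." segments (often called Zip Slip). A hedged sketch under that assumption; the helper class and method names are hypothetical, not from any of these projects.

import java.io.File;
import java.io.IOException;

import org.apache.commons.compress.archivers.ArchiveEntry;

final class SafeExtraction {
    // Resolves an entry name against the destination directory and rejects
    // names that would escape it via ".." segments.
    static File resolveSafely(File destination, ArchiveEntry entry) throws IOException {
        File destFile = new File(destination, entry.getName());
        String canonicalDest = destination.getCanonicalPath() + File.separator;
        if (!destFile.getCanonicalPath().startsWith(canonicalDest)) {
            throw new IOException("entry is outside of the target dir: " + entry.getName());
        }
        return destFile;
    }
}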
From source file:freenet.client.ArchiveManager.java
private void handleTARArchive(ArchiveStoreContext ctx, FreenetURI key, InputStream data, String element,
        ArchiveExtractCallback callback, MutableBoolean gotElement, boolean throwAtExit, ClientContext context)
        throws ArchiveFailureException, ArchiveRestartException {
    if (logMINOR)
        Logger.minor(this, "Handling a TAR Archive");
    TarArchiveInputStream tarIS = null;
    try {
        tarIS = new TarArchiveInputStream(data);

        // MINOR: Assumes the first entry in the tarball is a directory.
        ArchiveEntry entry;

        byte[] buf = new byte[32768];
        HashSet<String> names = new HashSet<String>();
        boolean gotMetadata = false;

        outerTAR: while (true) {
            try {
                entry = tarIS.getNextEntry();
            } catch (IllegalArgumentException e) {
                // Annoyingly, it can throw this on some corruptions...
                throw new ArchiveFailureException("Error reading archive: " + e.getMessage(), e);
            }
            if (entry == null)
                break;
            if (entry.isDirectory())
                continue;
            String name = stripLeadingSlashes(entry.getName());
            if (names.contains(name)) {
                Logger.error(this, "Duplicate key " + name + " in archive " + key);
                continue;
            }
            long size = entry.getSize();
            if (name.equals(".metadata"))
                gotMetadata = true;
            if (size > maxArchivedFileSize && !name.equals(element)) {
                addErrorElement(ctx, key, name, "File too big: " + size
                        + " greater than current archived file size limit " + maxArchivedFileSize, true);
            } else {
                // Read the element
                long realLen = 0;
                Bucket output = tempBucketFactory.makeBucket(size);
                OutputStream out = output.getOutputStream();

                try {
                    int readBytes;
                    while ((readBytes = tarIS.read(buf)) > 0) {
                        out.write(buf, 0, readBytes);
                        realLen += readBytes; // accumulate the total number of bytes read so far
                        if (realLen > maxArchivedFileSize) {
                            addErrorElement(ctx, key, name, "File too big: " + maxArchivedFileSize
                                    + " greater than current archived file size limit " + maxArchivedFileSize, true);
                            out.close();
                            out = null;
                            output.free();
                            continue outerTAR;
                        }
                    }
                } finally {
                    if (out != null)
                        out.close();
                }

                if (size <= maxArchivedFileSize) {
                    addStoreElement(ctx, key, name, output, gotElement, element, callback, context);
                    names.add(name);
                    trimStoredData();
                } else {
                    // We are here because they asked for this file.
                    callback.gotBucket(output, context);
                    gotElement.value = true;
                    addErrorElement(ctx, key, name, "File too big: " + size
                            + " greater than current archived file size limit " + maxArchivedFileSize, true);
                }
            }
        }

        // If no metadata, generate some
        if (!gotMetadata) {
            generateMetadata(ctx, key, names, gotElement, element, callback, context);
            trimStoredData();
        }
        if (throwAtExit)
            throw new ArchiveRestartException("Archive changed on re-fetch");

        if ((!gotElement.value) && element != null)
            callback.notInArchive(context);
    } catch (IOException e) {
        throw new ArchiveFailureException("Error reading archive: " + e.getMessage(), e);
    } finally {
        Closer.close(tarIS);
    }
}
From source file:com.mobilesorcery.sdk.builder.linux.PackageParser.java
/**
 * Extracts a package template and parses and replaces variables
 * in filenames and files.
 *
 * @param o Output directory
 * @param i Input file
 *
 * @throws Exception If recursion is too deep, a variable isn't defined or
 *                   malformed meta data
 * @throws IOException Error reading input stream
 * @throws ParseException Malformed JSON
 * @throws FileNotFoundException Could not open input file
 */
public void doProcessTarGZip(File o, File i) throws Exception, IOException, ParseException, FileNotFoundException {
    FileInputStream fis = new FileInputStream(i);
    GZIPInputStream gis = new GZIPInputStream(fis);
    TarArchiveInputStream tis = new TarArchiveInputStream(gis);

    // Remove any old data if any
    if (o.exists() == true)
        o.delete();

    // Find and parse meta data; this should always be the
    // first file, but it can't be assumed
    while (true) {
        ArchiveEntry e = tis.getNextEntry();
        if (e == null)
            break;
        if (e.getName().equals(".meta/.meta") == false)
            continue;
        doParseMeta(tis);
        break;
    }

    // Reset input
    tis.close();
    gis.close();
    fis.close();
    fis = new FileInputStream(i);
    gis = new GZIPInputStream(fis);
    tis = new TarArchiveInputStream(gis);

    // Process and extract files
    while (true) {
        File f;
        ArchiveEntry e = tis.getNextEntry();
        if (e == null)
            break;

        // Check if it's a script that we need to load and parse
        if (e.getName().contains(".meta") == true) {
            if (m_scriptMap.containsKey(e.getName()) == true) {
                String name = m_scriptMap.get(e.getName());
                String script = m_varResolver.doParseStream(tis);
                m_scriptMap.put(name, script);
                m_scriptMap.remove(e.getName());
            }
            continue;
        }

        // Store its permissions
        String n = m_varResolver.doResolveString(e.getName());
        m_filemodeMap.put(n, ((TarArchiveEntry) e).getMode());

        // Directory?
        f = new File(o, n);
        if (e.isDirectory() == true) {
            if (f.exists() == false)
                f.mkdirs();
            continue;
        }

        // It's a file
        if (m_parseSet.contains(e.getName()) == true)
            m_varResolver.doParseCopyStream(f, tis);
        else
            BuilderUtil.getInstance().copyInputStreamToFile(f, tis, e.getSize());
    }
}
From source file:it.evilsocket.dsploit.core.UpdateService.java
/**
 * Extract an archive into a directory.
 *
 * @throws IOException if some I/O error occurs
 * @throws java.util.concurrent.CancellationException if task is cancelled by user
 * @throws java.lang.InterruptedException when the running thread gets cancelled.
 */
private void extract() throws CancellationException, RuntimeException, IOException, InterruptedException {
    ArchiveInputStream is = null;
    ArchiveEntry entry;
    CountingInputStream counter;
    File f, inFile;
    File[] list;
    String name;
    FileOutputStream fos = null;
    byte[] data = new byte[2048];
    int mode;
    int count;
    long total;
    short percentage, old_percentage;

    if (mCurrentTask.path == null || mCurrentTask.outputDir == null)
        return;

    mBuilder.setContentTitle(getString(R.string.extracting)).setContentText("").setContentInfo("")
            .setSmallIcon(android.R.drawable.ic_popup_sync).setProgress(100, 0, false);
    mNotificationManager.notify(NOTIFICATION_ID, mBuilder.build());

    Logger.info(String.format("extracting '%s' to '%s'", mCurrentTask.path, mCurrentTask.outputDir));

    try {
        inFile = new File(mCurrentTask.path);
        total = inFile.length();
        counter = new CountingInputStream(new FileInputStream(inFile));
        is = openArchiveStream(counter);
        old_percentage = -1;

        f = new File(mCurrentTask.outputDir);
        if (f.exists() && f.isDirectory() && (list = f.listFiles()) != null && list.length > 2)
            wipe();

        if (is instanceof TarArchiveInputStream && mCurrentTask.modeMap == null)
            mCurrentTask.modeMap = new HashMap<Integer, String>();

        while (mRunning && (entry = is.getNextEntry()) != null) {
            name = entry.getName().replaceFirst("^\\./?", "");

            if (mCurrentTask.dirToExtract != null) {
                if (!name.startsWith(mCurrentTask.dirToExtract))
                    continue;
                else
                    name = name.substring(mCurrentTask.dirToExtract.length());
            }

            f = new File(mCurrentTask.outputDir, name);
            if (entry.isDirectory()) {
                if (!f.exists()) {
                    if (!f.mkdirs()) {
                        throw new IOException(
                                String.format("Couldn't create directory '%s'.", f.getAbsolutePath()));
                    }
                }
            } else {
                BufferedOutputStream bof = new BufferedOutputStream(new FileOutputStream(f));

                while (mRunning && (count = is.read(data)) != -1) {
                    bof.write(data, 0, count);
                    percentage = (short) (((double) counter.getBytesRead() / total) * 100);
                    if (percentage != old_percentage) {
                        mBuilder.setProgress(100, percentage, false).setContentInfo(percentage + "%");
                        mNotificationManager.notify(NOTIFICATION_ID, mBuilder.build());
                        old_percentage = percentage;
                    }
                }
                bof.flush();
                bof.close();
            }

            // Zip does not store file permissions.
            if (entry instanceof TarArchiveEntry) {
                mode = ((TarArchiveEntry) entry).getMode();

                if (!mCurrentTask.modeMap.containsKey(mode))
                    mCurrentTask.modeMap.put(mode, entry.getName() + " ");
                else
                    mCurrentTask.modeMap.put(mode, mCurrentTask.modeMap.get(mode).concat(entry.getName() + " "));
            }
        }

        if (!mRunning)
            throw new CancellationException("extraction cancelled.");

        Logger.info("extraction completed");

        f = new File(mCurrentTask.outputDir, ".nomedia");
        if (f.createNewFile())
            Logger.info(".nomedia created");

        if (mCurrentTask.versionString != null && !mCurrentTask.versionString.isEmpty()) {
            f = new File(mCurrentTask.outputDir, "VERSION");
            fos = new FileOutputStream(f);
            fos.write(mCurrentTask.versionString.getBytes());
        } else
            Logger.warning("version string not found");

        mBuilder.setContentInfo("").setProgress(100, 100, true);
        mNotificationManager.notify(NOTIFICATION_ID, mBuilder.build());
    } finally {
        if (is != null)
            is.close();
        if (fos != null)
            fos.close();
    }
}
From source file:org.apache.ant.compress.resources.CommonsCompressArchiveResource.java
protected void setEntry(ArchiveEntry e) {
    if (e == null) {
        setExists(false);
        return;
    }
    setName(e.getName());
    setExists(true);
    setLastModified(e.getLastModifiedDate().getTime());
    setDirectory(e.isDirectory());
    setSize(e.getSize());
    setMode(EntryHelper.getMode(e));
    uid = EntryHelper.getUserId(e);
    gid = EntryHelper.getGroupId(e);
}
From source file:org.apache.ant.compress.resources.CommonsCompressArchiveScanner.java
/**
 * Fills the file and directory maps with resources read from the
 * archive.
 *
 * @param src the archive to scan.
 * @param encoding encoding used to encode file names inside the archive.
 * @param fileEntries Map (name to resource) of non-directory
 * resources found inside the archive.
 * @param matchFileEntries Map (name to resource) of non-directory
 * resources found inside the archive that matched all include
 * patterns and didn't match any exclude patterns.
 * @param dirEntries Map (name to resource) of directory
 * resources found inside the archive.
 * @param matchDirEntries Map (name to resource) of directory
 * resources found inside the archive that matched all include
 * patterns and didn't match any exclude patterns.
 */
protected void fillMapsFromArchive(Resource src, String encoding, Map fileEntries, Map matchFileEntries,
        Map dirEntries, Map matchDirEntries) {
    ArchiveEntry entry = null;
    ArchiveInputStream ai = null;

    try {
        try {
            ai = StreamHelper.getInputStream(factory, src, encoding);
            if (ai == null) {
                ai = factory.getArchiveStream(new BufferedInputStream(src.getInputStream()), encoding);
            }
        } catch (IOException ex) {
            throw new BuildException("problem opening " + src, ex);
        }

        while ((entry = ai.getNextEntry()) != null) {
            if (skipUnreadable && !ai.canReadEntryData(entry)) {
                log(Messages.skippedIsUnreadable(entry));
                continue;
            }
            Resource r = builder.buildResource(src, encoding, entry);
            String name = entry.getName();
            if (entry.isDirectory()) {
                name = trimSeparator(name);
                dirEntries.put(name, r);
                if (match(name)) {
                    matchDirEntries.put(name, r);
                }
            } else {
                fileEntries.put(name, r);
                if (match(name)) {
                    matchFileEntries.put(name, r);
                }
            }
        }
    } catch (IOException ex) {
        throw new BuildException("problem reading " + src, ex);
    } finally {
        FileUtils.close(ai);
    }
}
From source file:org.apache.ant.compress.taskdefs.ExpandBase.java
private void expandArchiveStream(String name, ArchiveInputStream is, File dir) throws IOException {
    FileNameMapper mapper = getMapper();
    log("Expanding: " + name + " into " + dir, Project.MSG_INFO);

    boolean empty = true;
    ArchiveEntry ent = null;
    while ((ent = is.getNextEntry()) != null) {
        if (skipUnreadable && !is.canReadEntryData(ent)) {
            log(Messages.skippedIsUnreadable(ent));
            continue;
        }
        empty = false;
        log("extracting " + ent.getName(), Project.MSG_DEBUG);
        extractFile(FileUtils.getFileUtils(), null, dir, is, ent.getName(), ent.getLastModifiedDate(),
                ent.isDirectory(), mapper);
    }
    if (empty && getFailOnEmptyArchive()) {
        throw new BuildException("archive '" + name + "' is empty");
    }
    log("expand complete", Project.MSG_VERBOSE);
}