List of usage examples for org.apache.commons.compress.archivers.zip ZipArchiveInputStream getNextZipEntry
public ZipArchiveEntry getNextZipEntry() throws IOException
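Before the project-specific examples below, here is a minimal, self-contained sketch of the typical getNextZipEntry() loop (the archive path "example.zip", the "UTF-8" charset and the 4 KiB buffer are placeholders, not taken from any of the sources below). The pattern is the one every example on this page builds on: call getNextZipEntry() until it returns null, skip directories and unreadable entries, and read the current entry's bytes directly from the stream.

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;

public class GetNextZipEntryExample {
    public static void main(String[] args) throws IOException {
        // "example.zip" is a placeholder archive path.
        try (ZipArchiveInputStream zis = new ZipArchiveInputStream(
                new BufferedInputStream(new FileInputStream("example.zip")), "UTF-8")) {
            ZipArchiveEntry entry;
            // getNextZipEntry() positions the stream at the next entry and
            // returns null once the archive is exhausted.
            while ((entry = zis.getNextZipEntry()) != null) {
                if (entry.isDirectory() || !zis.canReadEntryData(entry)) {
                    // Skip directories and entries using unsupported features.
                    continue;
                }
                byte[] buffer = new byte[4096];
                long size = 0;
                int read;
                // Reading from the stream reads the current entry's data.
                while ((read = zis.read(buffer)) != -1) {
                    size += read;
                }
                System.out.println(entry.getName() + " -> " + size + " bytes");
            }
        }
    }
}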
From source file:org.opengion.fukurou.util.ZipArchive.java
/**
 * Extracts the given ZIP file (zipFile) into the target directory (targetPath)
 * and returns the list of extracted files. Existing files are overwritten.
 *
 * @param targetPath extraction target directory
 * @param zipFile    ZIP file to extract
 * @param encording  entry-name encoding (typically "Windows-31J" on Windows)
 * @return list of files extracted from the ZIP
 * @og.rev 4.1.0.2 (2008/02/01)
 * @og.rev 4.3.1.1 (2008/08/23) mkdirs
 * @og.rev 4.3.3.3 (2008/10/22) mkdirs handling
 * @og.rev 5.1.9.0 (2010/08/01)
 * @og.rev 5.7.1.2 (2013/12/20) switched to org.apache.commons.compress
 */
public static List<File> unCompress(final File targetPath, final File zipFile, final String encording) {
    List<File> list = new ArrayList<File>();

    ZipArchiveInputStream zis = null;
    File tmpFile = null;
    try {
        zis = new ZipArchiveInputStream(new BufferedInputStream(new FileInputStream(zipFile)), encording);
        ZipArchiveEntry entry = null;
        while ((entry = zis.getNextZipEntry()) != null) {
            tmpFile = new File(targetPath, entry.getName());
            list.add(tmpFile);

            if (entry.isDirectory()) {
                if (!tmpFile.exists() && !tmpFile.mkdirs()) {
                    String errMsg = "Failed to create directory [dir=" + tmpFile + "]";
                    System.err.println(errMsg);
                    continue;
                }
            } else {
                // 4.3.3.3 (2008/10/22) create parent directories as needed
                if (!tmpFile.getParentFile().exists() && !tmpFile.getParentFile().mkdirs()) {
                    String errMsg = "Failed to create parent directory [file=" + tmpFile + "]";
                    System.err.println(errMsg);
                    continue;
                }
                BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(tmpFile));
                try {
                    IOUtils.copy(zis, out);
                } catch (IOException zex) {
                    String errMsg = "Failed to copy ZIP entry [file=" + tmpFile + "]";
                    System.err.println(errMsg);
                    continue;
                } finally {
                    Closer.ioClose(out);
                }
            }

            // 5.1.9.0 (2010/08/01) restore the entry's last-modified time
            long lastTime = entry.getTime();
            if (lastTime >= 0 && !tmpFile.setLastModified(lastTime)) {
                String errMsg = "Failed to set last-modified time [file=" + tmpFile + "]";
                System.err.println(errMsg);
            }
        }
    } catch (FileNotFoundException ex) {
        String errMsg = "File not found [file=" + tmpFile + "]";
        throw new RuntimeException(errMsg, ex);
    } catch (IOException ex) {
        String errMsg = "Failed to extract ZIP [file=" + tmpFile + "]";
        throw new RuntimeException(errMsg, ex);
    } finally {
        Closer.ioClose(zis);
    }
    return list;
}
From source file:org.waarp.common.tar.ZipUtility.java
/**
 * Extract all files from the Zip archive into the specified directory.
 *
 * @param tarFile
 * @param directory
 * @return the list of extracted filenames
 * @throws IOException
 */
public static List<String> unZip(File tarFile, File directory) throws IOException {
    List<String> result = new ArrayList<String>();
    InputStream inputStream = new FileInputStream(tarFile);
    ZipArchiveInputStream in = new ZipArchiveInputStream(inputStream);
    ZipArchiveEntry entry = in.getNextZipEntry();
    while (entry != null) {
        if (entry.isDirectory()) {
            entry = in.getNextZipEntry();
            continue;
        }
        File curfile = new File(directory, entry.getName());
        File parent = curfile.getParentFile();
        if (!parent.exists()) {
            parent.mkdirs();
        }
        OutputStream out = new FileOutputStream(curfile);
        IOUtils.copy(in, out);
        out.close();
        result.add(entry.getName());
        entry = in.getNextZipEntry();
    }
    in.close();
    return result;
}
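The extraction examples on this page resolve entry.getName() directly under the target directory. As a defensive variant (a sketch, not part of the waarp source; ZipPaths and resolveSafely are hypothetical names), the resolved path can be checked against the destination directory before writing, which rejects entry names that try to escape it with ".." segments:

import java.io.File;
import java.io.IOException;

// Hypothetical helper: resolves an entry name under a destination directory and
// rejects names that would escape it (e.g. "../../etc/passwd").
public final class ZipPaths {
    public static File resolveSafely(File destinationDir, String entryName) throws IOException {
        File target = new File(destinationDir, entryName);
        String dirPath = destinationDir.getCanonicalPath() + File.separator;
        if (!target.getCanonicalPath().startsWith(dirPath)) {
            throw new IOException("Entry is outside of the target directory: " + entryName);
        }
        return target;
    }
}

In the unZip method above, the line File curfile = new File(directory, entry.getName()); could then be replaced by File curfile = ZipPaths.resolveSafely(directory, entry.getName());.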
From source file:org.xwiki.contrib.confluence.filter.internal.ConfluenceXMLPackage.java
private void fromStream(InputStream stream) throws IOException {
    // Get temporary folder
    this.directory = File.createTempFile("confluencexml", "");
    this.directory.delete();
    this.directory.mkdir();
    this.temporaryDirectory = false;

    // Extract the zip
    ZipArchiveInputStream zais = new ZipArchiveInputStream(stream);
    for (ZipArchiveEntry zipEntry = zais.getNextZipEntry(); zipEntry != null; zipEntry = zais.getNextZipEntry()) {
        if (!zipEntry.isDirectory()) {
            String path = zipEntry.getName();
            File file = new File(this.directory, path);
            FileUtils.copyInputStreamToFile(new CloseShieldInputStream(zais), file);
        }
    }
}
From source file:org.xwiki.filter.confluence.xml.internal.ConfluenceXMLPackage.java
public ConfluenceXMLPackage(InputSource source) throws IOException, FilterException, XMLStreamException,
        FactoryConfigurationError, NumberFormatException, ConfigurationException {
    InputStream stream;

    if (source instanceof InputStreamInputSource) {
        stream = ((InputStreamInputSource) source).getInputStream();
    } else {
        throw new FilterException(
                String.format("Unsupported input source of type [%s]", source.getClass().getName()));
    }

    try {
        // Get temporary folder
        this.directory = File.createTempFile("confluencexml", "");
        this.directory.delete();
        this.directory.mkdir();
        this.directory.deleteOnExit();

        // Extract the zip
        ZipArchiveInputStream zais = new ZipArchiveInputStream(stream);
        for (ZipArchiveEntry zipEntry = zais.getNextZipEntry(); zipEntry != null; zipEntry = zais
                .getNextZipEntry()) {
            if (!zipEntry.isDirectory()) {
                String path = zipEntry.getName();
                File file = new File(this.directory, path);

                if (path.equals("entities.xml")) {
                    this.entities = file;
                } else if (path.equals("exportDescriptor.properties")) {
                    this.descriptor = file;
                }

                FileUtils.copyInputStreamToFile(new CloseShieldInputStream(zais), file);
            }
        }
    } finally {
        source.close();
    }

    // Initialize
    createTree();
}
From source file:org.xwiki.filter.xar.internal.input.WikiReader.java
public void read(InputStream stream, Object filter, XARInputFilter proxyFilter) throws IOException {
    ZipArchiveInputStream zis = new ZipArchiveInputStream(stream, "UTF-8", false);

    for (ZipArchiveEntry entry = zis.getNextZipEntry(); entry != null; entry = zis.getNextZipEntry()) {
        if (entry.isDirectory() || entry.getName().startsWith("META-INF")) {
            // The entry is either a directory or is something inside of the META-INF dir.
            // (we use that directory to put meta data such as LICENSE/NOTICE files.)
            continue;
        } else if (entry.getName().equals(XarModel.PATH_PACKAGE)) {
            // The entry is the manifest (package.xml). Read this differently.
            try {
                this.xarPackage.readDescriptor(zis);
            } catch (Exception e) {
                if (this.properties.isVerbose()) {
                    this.logger.warn(LOG_DESCRIPTOR_FAILREAD,
                            "Failed to read XAR descriptor from entry [{}]: {}", entry.getName(),
                            ExceptionUtils.getRootCauseMessage(e));
                }
            }
        } else {
            try {
                this.documentReader.read(zis, filter, proxyFilter);
            } catch (SkipEntityException skip) {
                if (this.properties.isVerbose()) {
                    this.logger.info(LOG_DOCUMENT_SKIPPED, "Skipped document [{}]", skip.getEntityReference());
                }
            } catch (Exception e) {
                if (this.properties.isVerbose()) {
                    this.logger.warn(LOG_DOCUMENT_FAILREAD,
                            "Failed to read XAR XML document from entry [{}]: {}", entry.getName(),
                            ExceptionUtils.getRootCauseMessage(e), e);
                }
            }
        }
    }
}
From source file:org.xwiki.wikistream.confluence.xml.internal.ConfluenceXMLPackage.java
public ConfluenceXMLPackage(InputSource source) throws IOException, WikiStreamException, XMLStreamException,
        FactoryConfigurationError, NumberFormatException, ConfigurationException {
    InputStream stream;

    if (source instanceof InputStreamInputSource) {
        stream = ((InputStreamInputSource) source).getInputStream();
    } else {
        throw new WikiStreamException(
                String.format("Unsupported input source of type [%s]", source.getClass().getName()));
    }

    try {
        // Get temporary folder
        this.directory = File.createTempFile("confluencexml", "");
        this.directory.delete();
        this.directory.mkdir();
        this.directory.deleteOnExit();

        // Extract the zip
        ZipArchiveInputStream zais = new ZipArchiveInputStream(stream);
        for (ZipArchiveEntry zipEntry = zais.getNextZipEntry(); zipEntry != null; zipEntry = zais
                .getNextZipEntry()) {
            if (!zipEntry.isDirectory()) {
                String path = zipEntry.getName();
                File file = new File(this.directory, path);

                if (path.equals("entities.xml")) {
                    this.entities = file;
                } else if (path.equals("exportDescriptor.properties")) {
                    this.descriptor = file;
                }

                FileUtils.copyInputStreamToFile(new CloseShieldInputStream(zais), file);
            }
        }
    } finally {
        source.close();
    }

    // Initialize
    createTree();
}
From source file:org.xwiki.wikistream.xar.internal.input.WikiReader.java
public void read(InputStream stream, Object filter, XARInputFilter proxyFilter)
        throws XMLStreamException, IOException, WikiStreamException {
    ZipArchiveInputStream zis = new ZipArchiveInputStream(stream, "UTF-8", false);

    for (ZipArchiveEntry entry = zis.getNextZipEntry(); entry != null; entry = zis.getNextZipEntry()) {
        if (entry.isDirectory() || entry.getName().startsWith("META-INF")) {
            // The entry is either a directory or is something inside of the META-INF dir.
            // (we use that directory to put meta data such as LICENSE/NOTICE files.)
            continue;
        } else if (entry.getName().equals(XarModel.PATH_PACKAGE)) {
            // The entry is the manifest (package.xml). Read this differently.
            try {
                this.xarPackage.readDescriptor(zis);
            } catch (Exception e) {
                if (this.properties.isVerbose()) {
                    this.logger.warn(LOG_DESCRIPTOR_FAILREAD,
                            "Failed to read XAR descriptor from entry [{}]: {}", entry.getName(),
                            ExceptionUtils.getRootCauseMessage(e));
                }
            }
        } else {
            try {
                this.documentReader.read(zis, filter, proxyFilter);
            } catch (SkipEntityException skip) {
                if (this.properties.isVerbose()) {
                    this.logger.info(LOG_DOCUMENT_SKIPPED, "Skipped document [{}]", skip.getEntityReference());
                }
            } catch (Exception e) {
                if (this.properties.isVerbose()) {
                    this.logger.warn(LOG_DOCUMENT_FAILREAD,
                            "Failed to read XAR XML document from entry [{}]: {}", entry.getName(),
                            ExceptionUtils.getRootCauseMessage(e));
                }
            }
        }
    }
}
From source file:org.xwiki.xar.XarPackage.java
/**
 * Find and add the entries located in the passed XAR file.
 *
 * @param xarStream an input stream to a XAR file
 * @throws IOException when failing to read the file
 * @throws XarException when failing to parse the XAR package
 */
public void read(InputStream xarStream) throws IOException, XarException {
    ZipArchiveInputStream zis = new ZipArchiveInputStream(xarStream, "UTF-8", false);

    try {
        for (ZipArchiveEntry entry = zis.getNextZipEntry(); entry != null; entry = zis.getNextZipEntry()) {
            if (!entry.isDirectory() && zis.canReadEntryData(entry)) {
                readEntry(zis, entry);
            }
        }
    } finally {
        zis.close();
    }
}
From source file:stroom.proxy.repo.StroomStreamProcessor.java
private void processZipStream(final InputStream inputStream, final String prefix) throws IOException {
    final ByteCountInputStream byteCountInputStream = new ByteCountInputStream(inputStream);

    final Map<String, MetaMap> bufferedMetaMap = new HashMap<>();
    final Map<String, Long> dataStreamSizeMap = new HashMap<>();
    final List<String> sendDataList = new ArrayList<>();
    final StroomZipNameSet stroomZipNameSet = new StroomZipNameSet(false);

    final ZipArchiveInputStream zipArchiveInputStream = new ZipArchiveInputStream(byteCountInputStream);

    ZipArchiveEntry zipEntry = null;
    while (true) {
        // We have to wrap our stream reading code in a individual try/catch
        // so we can return to the client an error in the case of a corrupt
        // stream.
        try {
            zipEntry = zipArchiveInputStream.getNextZipEntry();
        } catch (final IOException ioEx) {
            throw new StroomStreamException(StroomStatusCode.COMPRESSED_STREAM_INVALID, ioEx.getMessage());
        }

        if (zipEntry == null) {
            // All done
            break;
        }

        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("process() - " + zipEntry);
        }

        final String entryName = prefix + zipEntry.getName();
        final StroomZipEntry stroomZipEntry = stroomZipNameSet.add(entryName);

        if (StroomZipFileType.Meta.equals(stroomZipEntry.getStroomZipFileType())) {
            final MetaMap entryMetaMap = MetaMapFactory.cloneAllowable(globalMetaMap);

            // We have to wrap our stream reading code in a individual
            // try/catch so we can return to the client an error in the case
            // of a corrupt stream.
            try {
                entryMetaMap.read(zipArchiveInputStream, false);
            } catch (final IOException ioEx) {
                throw new StroomStreamException(StroomStatusCode.COMPRESSED_STREAM_INVALID, ioEx.getMessage());
            }

            if (appendReceivedPath) {
                // Here we build up a list of stroom servers that have received
                // the message.
                // The entry one will be initially set at the boundary Stroom
                // server.
                final String entryReceivedServer = entryMetaMap.get(StroomHeaderArguments.RECEIVED_PATH);

                if (entryReceivedServer != null) {
                    if (!entryReceivedServer.contains(getHostName())) {
                        entryMetaMap.put(StroomHeaderArguments.RECEIVED_PATH,
                                entryReceivedServer + "," + getHostName());
                    }
                } else {
                    entryMetaMap.put(StroomHeaderArguments.RECEIVED_PATH, getHostName());
                }
            }

            if (entryMetaMap.containsKey(StroomHeaderArguments.STREAM_SIZE)) {
                // Header already has stream size so just send it on
                sendHeader(stroomZipEntry, entryMetaMap);
            } else {
                // We need to add the stream size.
                // Sent the data file yet?
                final String dataFile = stroomZipNameSet.getName(stroomZipEntry.getBaseName(),
                        StroomZipFileType.Data);
                if (dataFile != null && dataStreamSizeMap.containsKey(dataFile)) {
                    // Yes we can send the header now
                    entryMetaMap.put(StroomHeaderArguments.STREAM_SIZE,
                            String.valueOf(dataStreamSizeMap.get(dataFile)));
                    sendHeader(stroomZipEntry, entryMetaMap);
                } else {
                    // Else we have to buffer it
                    bufferedMetaMap.put(stroomZipEntry.getBaseName(), entryMetaMap);
                }
            }
        } else {
            handleEntryStart(stroomZipEntry);
            long totalRead = 0;
            int read = 0;
            while (true) {
                // We have to wrap our stream reading code in a individual
                // try/catch so we can return to the client an error in the
                // case of a corrupt stream.
                try {
                    read = StreamUtil.eagerRead(zipArchiveInputStream, buffer);
                } catch (final IOException ioEx) {
                    throw new StroomStreamException(StroomStatusCode.COMPRESSED_STREAM_INVALID, ioEx.getMessage());
                }
                if (read == -1) {
                    break;
                }
                streamProgressMonitor.progress(read);
                handleEntryData(buffer, 0, read);
                totalRead += read;
            }
            handleEntryEnd();

            if (StroomZipFileType.Data.equals(stroomZipEntry.getStroomZipFileType())) {
                sendDataList.add(entryName);
                dataStreamSizeMap.put(entryName, totalRead);
            }

            // Buffered header can now be sent as we have sent the data
            if (stroomZipEntry.getBaseName() != null) {
                final MetaMap entryMetaMap = bufferedMetaMap.remove(stroomZipEntry.getBaseName());
                if (entryMetaMap != null) {
                    entryMetaMap.put(StroomHeaderArguments.STREAM_SIZE, String.valueOf(totalRead));
                    handleEntryStart(
                            new StroomZipEntry(null, stroomZipEntry.getBaseName(), StroomZipFileType.Meta));
                    final byte[] headerBytes = entryMetaMap.toByteArray();
                    handleEntryData(headerBytes, 0, headerBytes.length);
                    handleEntryEnd();
                }
            }
        }
    }

    if (stroomZipNameSet.getBaseNameSet().isEmpty()) {
        if (byteCountInputStream.getByteCount() > 22) {
            throw new StroomStreamException(StroomStatusCode.COMPRESSED_STREAM_INVALID, "No Zip Entries");
        } else {
            LOGGER.warn("processZipStream() - Zip stream with no entries ! {}", globalMetaMap);
        }
    }

    // Add missing headers
    for (final String baseName : stroomZipNameSet.getBaseNameList()) {
        final String headerName = stroomZipNameSet.getName(baseName, StroomZipFileType.Meta);
        // Send Generic Header
        if (headerName == null) {
            final String dataFileName = stroomZipNameSet.getName(baseName, StroomZipFileType.Data);
            final MetaMap entryMetaMap = MetaMapFactory.cloneAllowable(globalMetaMap);
            entryMetaMap.put(StroomHeaderArguments.STREAM_SIZE,
                    String.valueOf(dataStreamSizeMap.remove(dataFileName)));
            sendHeader(new StroomZipEntry(null, baseName, StroomZipFileType.Meta), entryMetaMap);
        }
    }
}
From source file:stroom.util.zip.StroomStreamProcessor.java
private void processZipStream(final InputStream inputStream, final String prefix) throws IOException {
    final ByteCountInputStream byteCountInputStream = new ByteCountInputStream(inputStream);

    final Map<String, HeaderMap> bufferedHeaderMap = new HashMap<String, HeaderMap>();
    final Map<String, Long> dataStreamSizeMap = new HashMap<String, Long>();
    final List<String> sendDataList = new ArrayList<String>();
    final StroomZipNameSet stroomZipNameSet = new StroomZipNameSet(false);

    final ZipArchiveInputStream zipArchiveInputStream = new ZipArchiveInputStream(byteCountInputStream);

    ZipArchiveEntry zipEntry = null;
    while (true) {
        // We have to wrap our stream reading code in a individual try/catch
        // so we can return to the client an error in the case of a corrupt
        // stream.
        try {
            zipEntry = zipArchiveInputStream.getNextZipEntry();
        } catch (final IOException ioEx) {
            throw new StroomStreamException(StroomStatusCode.COMPRESSED_STREAM_INVALID, ioEx.getMessage());
        }

        if (zipEntry == null) {
            // All done
            break;
        }

        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("process() - " + zipEntry);
        }

        final String entryName = prefix + zipEntry.getName();
        final StroomZipEntry stroomZipEntry = stroomZipNameSet.add(entryName);

        if (StroomZipFileType.Meta.equals(stroomZipEntry.getStroomZipFileType())) {
            final HeaderMap entryHeaderMap = globalHeaderMap.cloneAllowable();

            // We have to wrap our stream reading code in a individual
            // try/catch so we can return to the client an error in the case
            // of a corrupt stream.
            try {
                entryHeaderMap.read(zipArchiveInputStream, false);
            } catch (final IOException ioEx) {
                throw new StroomStreamException(StroomStatusCode.COMPRESSED_STREAM_INVALID, ioEx.getMessage());
            }

            if (appendReceivedPath) {
                // Here we build up a list of stroom servers that have received
                // the message.
                // The entry one will be initially set at the boundary Stroom
                // server.
                final String entryReceivedServer = entryHeaderMap.get(StroomHeaderArguments.RECEIVED_PATH);

                if (entryReceivedServer != null) {
                    if (!entryReceivedServer.contains(getHostName())) {
                        entryHeaderMap.put(StroomHeaderArguments.RECEIVED_PATH,
                                entryReceivedServer + "," + getHostName());
                    }
                } else {
                    entryHeaderMap.put(StroomHeaderArguments.RECEIVED_PATH, getHostName());
                }
            }

            if (entryHeaderMap.containsKey(StroomHeaderArguments.STREAM_SIZE)) {
                // Header already has stream size so just send it on
                sendHeader(stroomZipEntry, entryHeaderMap);
            } else {
                // We need to add the stream size.
                // Sent the data file yet?
                final String dataFile = stroomZipNameSet.getName(stroomZipEntry.getBaseName(),
                        StroomZipFileType.Data);
                if (dataFile != null && dataStreamSizeMap.containsKey(dataFile)) {
                    // Yes we can send the header now
                    entryHeaderMap.put(StroomHeaderArguments.STREAM_SIZE,
                            String.valueOf(dataStreamSizeMap.get(dataFile)));
                    sendHeader(stroomZipEntry, entryHeaderMap);
                } else {
                    // Else we have to buffer it
                    bufferedHeaderMap.put(stroomZipEntry.getBaseName(), entryHeaderMap);
                }
            }
        } else {
            handleEntryStart(stroomZipEntry);
            long totalRead = 0;
            int read = 0;
            while (true) {
                // We have to wrap our stream reading code in a individual
                // try/catch so we can return to the client an error in the
                // case of a corrupt stream.
                try {
                    read = StreamUtil.eagerRead(zipArchiveInputStream, buffer);
                } catch (final IOException ioEx) {
                    throw new StroomStreamException(StroomStatusCode.COMPRESSED_STREAM_INVALID, ioEx.getMessage());
                }
                if (read == -1) {
                    break;
                }
                streamProgressMonitor.progress(read);
                handleEntryData(buffer, 0, read);
                totalRead += read;
            }
            handleEntryEnd();

            if (StroomZipFileType.Data.equals(stroomZipEntry.getStroomZipFileType())) {
                sendDataList.add(entryName);
                dataStreamSizeMap.put(entryName, totalRead);
            }

            // Buffered header can now be sent as we have sent the data
            if (stroomZipEntry.getBaseName() != null) {
                final HeaderMap entryHeaderMap = bufferedHeaderMap.remove(stroomZipEntry.getBaseName());
                if (entryHeaderMap != null) {
                    entryHeaderMap.put(StroomHeaderArguments.STREAM_SIZE, String.valueOf(totalRead));
                    handleEntryStart(
                            new StroomZipEntry(null, stroomZipEntry.getBaseName(), StroomZipFileType.Meta));
                    final byte[] headerBytes = entryHeaderMap.toByteArray();
                    handleEntryData(headerBytes, 0, headerBytes.length);
                    handleEntryEnd();
                }
            }
        }
    }

    if (stroomZipNameSet.getBaseNameSet().isEmpty()) {
        if (byteCountInputStream.getByteCount() > 22) {
            throw new StroomStreamException(StroomStatusCode.COMPRESSED_STREAM_INVALID, "No Zip Entries");
        } else {
            LOGGER.warn("processZipStream() - Zip stream with no entries ! %s", globalHeaderMap);
        }
    }

    // Add missing headers
    for (final String baseName : stroomZipNameSet.getBaseNameList()) {
        final String headerName = stroomZipNameSet.getName(baseName, StroomZipFileType.Meta);
        // Send Generic Header
        if (headerName == null) {
            final String dataFileName = stroomZipNameSet.getName(baseName, StroomZipFileType.Data);
            final HeaderMap entryHeaderMap = globalHeaderMap.cloneAllowable();
            entryHeaderMap.put(StroomHeaderArguments.STREAM_SIZE,
                    String.valueOf(dataStreamSizeMap.remove(dataFileName)));
            sendHeader(new StroomZipEntry(null, baseName, StroomZipFileType.Meta), entryHeaderMap);
        }
    }
}