List of usage examples for org.apache.commons.compress.archivers.zip.ZipArchiveEntry#getName()
public String getName()
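Before the project-specific examples below, here is a minimal sketch of the common pattern they all share: iterate a ZipArchiveInputStream and read each entry's in-archive path via getName(). The archive path "sample.zip" and the directory filter are illustrative assumptions, not taken from any of the projects listed below.

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;

public class ListZipEntryNames {
    public static void main(String[] args) throws IOException {
        // "sample.zip" is a placeholder path used only for this sketch.
        try (InputStream in = new FileInputStream("sample.zip");
                ZipArchiveInputStream zin = new ZipArchiveInputStream(in)) {
            ZipArchiveEntry entry;
            while ((entry = zin.getNextZipEntry()) != null) {
                // getName() returns the entry's path inside the archive.
                if (!entry.isDirectory()) {
                    System.out.println(entry.getName());
                }
            }
        }
    }
}

The examples that follow use this same loop shape inside larger install, signing, and repackaging workflows.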
From source file:com.fujitsu.dc.core.bar.BarFileReadRunner.java
/**
 * Checks whether the next Zip entry's name matches the specified name.
 * @param name expected entry name
 * @return true if the next entry name matches
 */
private boolean isMatchEntryName(String name) {
    boolean ret = false;
    try {
        ZipArchiveEntry zae = this.zipArchiveInputStream.getNextZipEntry();
        if (zae != null) {
            ret = zae.getName().equals(name);
        }
    } catch (IOException ex) {
        log.info("bar file entry was not read.");
        ret = false;
    }
    return ret;
}
From source file:com.fujitsu.dc.core.bar.BarFileReadRunner.java
/**
 * Determines whether the Zip entry is the "bar/90_contents" directory.
 * @param zae ZipArchiveEntry
 * @return true if the entry is "bar/90_contents"
 */
private boolean isContentsDir(ZipArchiveEntry zae) {
    boolean ret = false;
    if (zae == null) {
        ret = isMatchEntryName(CONTENTS_DIR);
    } else {
        ret = zae.getName().equals(CONTENTS_DIR);
    }
    return ret;
}
From source file:io.personium.core.bar.BarFileReadRunner.java
/**
 * Runs the bar file installation.
 */
public void run() {
    boolean isSuccess = true;
    String path = "/" + this.cell.getName() + "/" + boxName + "/";
    log.debug("install target: " + path);
    try {
        List<String> doneKeys = new ArrayList<String>();
        try {
            this.zipArchiveInputStream = new ZipArchiveInputStream(new FileInputStream(barFile));
        } catch (IOException e) {
            throw PersoniumCoreException.Server.FILE_SYSTEM_ERROR.params(e.getMessage());
        }
        // Check the root directory ("bar/").
        if (!isRootDir()) {
            String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2001");
            writeOutputStream(true, "PL-BI-1004", ROOT_DIR, message);
            isSuccess = false;
            return;
        }
        // Check the 00_meta directory.
        if (!isMetadataDir()) {
            String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2001");
            writeOutputStream(true, "PL-BI-1004", META_DIR, message);
            isSuccess = false;
            return;
        }
        // Read the 00_meta entries.
        ZipArchiveEntry zae = null;
        try {
            long maxBarEntryFileSize = getMaxBarEntryFileSize();
            Set<String> keyList = barFileOrder.keySet();
            while ((zae = this.zipArchiveInputStream.getNextZipEntry()) != null) {
                String entryName = zae.getName();
                log.debug("Entry Name: " + entryName);
                log.debug("Entry Size: " + zae.getSize());
                log.debug("Entry Compressed Size: " + zae.getCompressedSize());
                if (!zae.isDirectory()) {
                    this.progressInfo.addDelta(1L);
                }
                // Register metadata from the bar file entry.
                isSuccess = createMetadata(zae, entryName, maxBarEntryFileSize, keyList, doneKeys);
                if (!isSuccess) {
                    break;
                }
                // Stop reading metadata once 90_contents is reached.
                if (isContentsDir(zae)) {
                    if (davCmpMap.isEmpty()) {
                        writeOutputStream(true, "PL-BI-1004", zae.getName());
                        isSuccess = false;
                    } else {
                        writeOutputStream(false, "PL-BI-1003", zae.getName());
                    }
                    doneKeys.add(zae.getName());
                    break;
                }
            }
        } catch (IOException ex) {
            isSuccess = false;
            log.info("IOException: " + ex.getMessage(), ex.fillInStackTrace());
        }
        // Register the contents under 90_contents.
        if (isSuccess && isContentsDir(zae)) {
            isSuccess = createContents();
        }
        // Verify that all required entries have been processed.
        if (isSuccess) {
            Set<String> filenameList = barFileOrder.keySet();
            for (String filename : filenameList) {
                Boolean isNecessary = barFileOrder.get(filename);
                if (isNecessary && !doneKeys.contains(filename)) {
                    String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2001");
                    writeOutputStream(true, "PL-BI-1004", filename, message);
                    isSuccess = false;
                }
            }
        }
    } catch (Throwable ex) {
        isSuccess = false;
        String message = getErrorMessage(ex);
        log.info("Exception: " + message, ex.fillInStackTrace());
        writeOutputStream(true, "PL-BI-1005", "", message);
    } finally {
        if (isSuccess) {
            writeOutputStream(false, CODE_BAR_INSTALL_COMPLETED, this.cell.getUrl() + boxName, "");
            this.progressInfo.setStatus(ProgressInfo.STATUS.COMPLETED);
        } else {
            String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2001");
            writeOutputStream(false, CODE_BAR_INSTALL_FAILED, this.cell.getUrl() + boxName, message);
            this.progressInfo.setStatus(ProgressInfo.STATUS.FAILED);
        }
        this.progressInfo.setEndTime();
        writeToProgressCache(true);
        IOUtils.closeQuietly(this.zipArchiveInputStream);
        if (this.barFile.exists() && !this.barFile.delete()) {
            log.warn("Failed to remove bar file. [" + this.barFile.getAbsolutePath() + "].");
        }
    }
}
From source file:com.fujitsu.dc.core.bar.BarFileReadRunner.java
/**
 * Runs the bar file installation.
 */
public void run() {
    boolean isSuccess = true;
    String path = "/" + this.cell.getName() + "/" + boxName + "/";
    log.debug("install target: " + path);
    try {
        List<String> doneKeys = new ArrayList<String>();
        try {
            this.zipArchiveInputStream = new ZipArchiveInputStream(new FileInputStream(barFile));
        } catch (IOException e) {
            throw DcCoreException.Server.FILE_SYSTEM_ERROR.params(e.getMessage());
        }
        // Check the root directory ("bar/").
        if (!isRootDir()) {
            String message = DcCoreMessageUtils.getMessage("PL-BI-2001");
            writeOutputStream(true, "PL-BI-1004", ROOT_DIR, message);
            isSuccess = false;
            return;
        }
        // Check the 00_meta directory.
        if (!isMetadataDir()) {
            String message = DcCoreMessageUtils.getMessage("PL-BI-2001");
            writeOutputStream(true, "PL-BI-1004", META_DIR, message);
            isSuccess = false;
            return;
        }
        // Read the 00_meta entries.
        ZipArchiveEntry zae = null;
        try {
            long maxBarEntryFileSize = getMaxBarEntryFileSize();
            Set<String> keyList = barFileOrder.keySet();
            while ((zae = this.zipArchiveInputStream.getNextZipEntry()) != null) {
                String entryName = zae.getName();
                log.debug("Entry Name: " + entryName);
                log.debug("Entry Size: " + zae.getSize());
                log.debug("Entry Compressed Size: " + zae.getCompressedSize());
                if (!zae.isDirectory()) {
                    this.progressInfo.addDelta(1L);
                }
                // Register metadata from the bar file entry.
                isSuccess = createMetadata(zae, entryName, maxBarEntryFileSize, keyList, doneKeys);
                if (!isSuccess) {
                    break;
                }
                // Stop reading metadata once 90_contents is reached.
                if (isContentsDir(zae)) {
                    if (davCmpMap.isEmpty()) {
                        writeOutputStream(true, "PL-BI-1004", zae.getName());
                        isSuccess = false;
                    } else {
                        writeOutputStream(false, "PL-BI-1003", zae.getName());
                    }
                    doneKeys.add(zae.getName());
                    break;
                }
            }
        } catch (IOException ex) {
            isSuccess = false;
            log.info("IOException: " + ex.getMessage(), ex.fillInStackTrace());
        }
        // Register the contents under 90_contents.
        if (isSuccess && isContentsDir(zae)) {
            isSuccess = createContents();
        }
        // Verify that all required entries have been processed.
        if (isSuccess) {
            Set<String> filenameList = barFileOrder.keySet();
            for (String filename : filenameList) {
                Boolean isNecessary = barFileOrder.get(filename);
                if (isNecessary && !doneKeys.contains(filename)) {
                    String message = DcCoreMessageUtils.getMessage("PL-BI-2001");
                    writeOutputStream(true, "PL-BI-1004", filename, message);
                    isSuccess = false;
                }
            }
        }
    } catch (Throwable ex) {
        isSuccess = false;
        String message = getErrorMessage(ex);
        log.info("Exception: " + message, ex.fillInStackTrace());
        writeOutputStream(true, "PL-BI-1005", "", message);
    } finally {
        if (isSuccess) {
            writeOutputStream(false, CODE_BAR_INSTALL_COMPLETED, this.cell.getUrl() + boxName, "");
            this.progressInfo.setStatus(ProgressInfo.STATUS.COMPLETED);
        } else {
            String message = DcCoreMessageUtils.getMessage("PL-BI-2001");
            writeOutputStream(false, CODE_BAR_INSTALL_FAILED, this.cell.getUrl() + boxName, message);
            this.progressInfo.setStatus(ProgressInfo.STATUS.FAILED);
        }
        this.progressInfo.setEndTime();
        writeToProgressCache(true);
        IOUtils.closeQuietly(this.zipArchiveInputStream);
        if (this.barFile.exists() && !this.barFile.delete()) {
            log.warn("Failed to remove bar file. [" + this.barFile.getAbsolutePath() + "].");
        }
    }
}
From source file:io.personium.core.bar.BarFileReadRunner.java
/**
 * Registers the contents (bar/90_contents) of the bar file.
 * @return true on success
 */
protected boolean createContents() {
    boolean isSuccess = true;
    // Get the registered collections as maps keyed by collection type.
    Map<String, DavCmp> odataCols = getCollections(DavCmp.TYPE_COL_ODATA);
    Map<String, DavCmp> webdavCols = getCollections(DavCmp.TYPE_COL_WEBDAV);
    Map<String, DavCmp> serviceCols = getCollections(DavCmp.TYPE_COL_SVC);
    DavCmp davCmp = null;
    List<String> doneKeys = new ArrayList<String>();
    try {
        ZipArchiveEntry zae = null;
        String currentPath = null;
        int userDataCount = 0;
        List<JSONMappedObject> userDataLinks = new ArrayList<JSONMappedObject>();
        LinkedHashMap<String, BulkRequest> bulkRequests = new LinkedHashMap<String, BulkRequest>();
        Map<String, String> fileNameMap = new HashMap<String, String>();
        PersoniumODataProducer producer = null;
        while ((zae = this.zipArchiveInputStream.getNextZipEntry()) != null) {
            String entryName = zae.getName();
            log.debug("Entry Name: " + entryName);
            log.debug("Entry Size: " + zae.getSize());
            log.debug("Entry Compressed Size: " + zae.getCompressedSize());
            if (!zae.isDirectory()) {
                this.progressInfo.addDelta(1L);
            }
            writeOutputStream(false, CODE_BAR_INSTALL_STARTED, entryName);
            // When leaving an OData collection for another collection,
            // flush the pending bulk requests and user data links.
            if (currentPath != null && !entryName.startsWith(currentPath)) {
                if (!execBulkRequest(davCmp.getCell().getId(), bulkRequests, fileNameMap, producer)) {
                    return false;
                }
                if (!createUserdataLinks(producer, userDataLinks)) {
                    return false;
                }
                userDataLinks = new ArrayList<JSONMappedObject>();
                currentPath = null;
            }
            int entryType = getEntryType(entryName, odataCols, webdavCols, serviceCols, this.davFileMap);
            switch (entryType) {
            case TYPE_ODATA_COLLECTION:
                // OData collection entry.
                if (!odataCols.isEmpty()) {
                    if (!isValidODataContents(entryName, odataCols, doneKeys)) {
                        return false;
                    }
                    Pattern formatPattern = Pattern.compile(CONTENTS_DIR + ".+/90_data/");
                    Matcher formatMatcher = formatPattern.matcher(entryName);
                    if (formatMatcher.matches()) {
                        currentPath = entryName;
                    }
                    Pattern userodataDirPattern = Pattern.compile(CONTENTS_DIR + ".+/90_data/.+");
                    Matcher userodataDirMatcher = userodataDirPattern.matcher(entryName);
                    if (getFileExtension(entryName).equals(".xml")) {
                        // 00_$metadata.xml: register the user schema.
                        davCmp = getCollection(entryName, odataCols);
                        // Get the OData producer for the collection.
                        producer = davCmp.getODataProducer();
                        if (!registUserSchema(entryName, this.zipArchiveInputStream, davCmp)) {
                            doneKeys.add(entryName);
                            return false;
                        }
                        writeOutputStream(false, "PL-BI-1003", entryName);
                        doneKeys.add(entryName);
                        continue;
                    } else if (entryName.endsWith(USERDATA_LINKS_JSON)) {
                        userDataLinks = registJsonLinksUserdata(entryName, this.zipArchiveInputStream);
                        if (userDataLinks == null) {
                            doneKeys.add(entryName);
                            return false;
                        }
                        writeOutputStream(false, "PL-BI-1003", entryName);
                        doneKeys.add(entryName);
                        continue;
                    } else if (userodataDirMatcher.matches() && getFileExtension(entryName).equals(".json")) {
                        userDataCount++;
                        if (!setBulkRequests(entryName, producer, bulkRequests, fileNameMap)) {
                            return false;
                        }
                        doneKeys.add(entryName);
                        if ((userDataCount % bulkSize) == 0
                                && !execBulkRequest(davCmp.getCell().getId(), bulkRequests, fileNameMap, producer)) {
                            return false;
                        }
                        continue;
                    } else if (!entryName.endsWith("/")) {
                        // Files other than xml/json are invalid here.
                        String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2001");
                        log.info(message + " [" + entryName + "]");
                        writeOutputStream(true, "PL-BI-1004", entryName, message);
                        return false;
                    }
                }
                break;
            case TYPE_DAV_FILE:
                // WebDAV file: register each file under bar/90_contents/{davcol_name}.
                if (!registWebDavFile(entryName, this.zipArchiveInputStream, webdavCols)) {
                    return false;
                }
                break;
            case TYPE_SVC_FILE:
                // Service collection file.
                if (!installSvcCollection(webdavCols, entryName)) {
                    return false;
                }
                break;
            case TYPE_MISMATCH:
                // The entry does not belong to any collection defined in rootprops.
                String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2006");
                log.info(message + " [" + entryName + "]");
                writeOutputStream(true, "PL-BI-1004", entryName, message);
                return false;
            default:
                break;
            }
            writeOutputStream(false, "PL-BI-1003", entryName);
            doneKeys.add(entryName);
        }
        // Flush any remaining bulk requests for the last OData collection.
        if (currentPath != null) {
            if (!execBulkRequest(davCmp.getCell().getId(), bulkRequests, fileNameMap, producer)) {
                return false;
            }
            if (!createUserdataLinks(producer, userDataLinks)) {
                return false;
            }
            userDataLinks = null;
        }
    } catch (IOException ex) {
        isSuccess = false;
        log.info("IOException: " + ex.getMessage(), ex.fillInStackTrace());
        String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2000");
        writeOutputStream(true, CODE_BAR_INSTALL_FAILED, "", message);
    }
    // Check that the required file (bar/90_contents/{odatacol_name}/00_$metadata.xml) was processed.
    isSuccess = checkNecessaryFile(isSuccess, odataCols, doneKeys);
    return isSuccess;
}
From source file:com.fujitsu.dc.core.bar.BarFileReadRunner.java
/**
 * Registers the contents (bar/90_contents) of the bar file.
 * @return true on success
 */
protected boolean createContents() {
    boolean isSuccess = true;
    // Get the registered collections as maps keyed by collection type.
    Map<String, DavCmpEsImpl> odataCols = getCollections(DavCmp.TYPE_COL_ODATA);
    Map<String, DavCmpEsImpl> webdavCols = getCollections(DavCmp.TYPE_COL_WEBDAV);
    Map<String, DavCmpEsImpl> serviceCols = getCollections(DavCmp.TYPE_COL_SVC);
    DavCmpEsImpl davCmp = null;
    List<String> doneKeys = new ArrayList<String>();
    try {
        ZipArchiveEntry zae = null;
        String currentPath = null;
        int userDataCount = 0;
        List<JSONMappedObject> userDataLinks = new ArrayList<JSONMappedObject>();
        LinkedHashMap<String, BulkRequest> bulkRequests = new LinkedHashMap<String, BulkRequest>();
        Map<String, String> fileNameMap = new HashMap<String, String>();
        DcODataProducer producer = null;
        while ((zae = this.zipArchiveInputStream.getNextZipEntry()) != null) {
            String entryName = zae.getName();
            log.debug("Entry Name: " + entryName);
            log.debug("Entry Size: " + zae.getSize());
            log.debug("Entry Compressed Size: " + zae.getCompressedSize());
            if (!zae.isDirectory()) {
                this.progressInfo.addDelta(1L);
            }
            writeOutputStream(false, CODE_BAR_INSTALL_STARTED, entryName);
            // When leaving an OData collection for another collection,
            // flush the pending bulk requests and user data links.
            if (currentPath != null && !entryName.startsWith(currentPath)) {
                if (!execBulkRequest(davCmp.getCellId(), bulkRequests, fileNameMap, producer)) {
                    return false;
                }
                if (!createUserdataLinks(producer, userDataLinks)) {
                    return false;
                }
                userDataLinks = new ArrayList<JSONMappedObject>();
                currentPath = null;
            }
            int entryType = getEntryType(entryName, odataCols, webdavCols, serviceCols, this.davFileMap);
            switch (entryType) {
            case TYPE_ODATA_COLLECTION:
                // OData collection entry.
                if (!odataCols.isEmpty()) {
                    if (!isValidODataContents(entryName, odataCols, doneKeys)) {
                        return false;
                    }
                    Pattern formatPattern = Pattern.compile(CONTENTS_DIR + ".+/90_data/");
                    Matcher formatMatcher = formatPattern.matcher(entryName);
                    if (formatMatcher.matches()) {
                        currentPath = entryName;
                    }
                    Pattern userodataDirPattern = Pattern.compile(CONTENTS_DIR + ".+/90_data/.+");
                    Matcher userodataDirMatcher = userodataDirPattern.matcher(entryName);
                    if (getFileExtension(entryName).equals(".xml")) {
                        // 00_$metadata.xml: register the user schema.
                        davCmp = getCollection(entryName, odataCols);
                        // Get the OData producer for the collection.
                        producer = davCmp.getODataProducer();
                        if (!registUserSchema(entryName, this.zipArchiveInputStream, davCmp)) {
                            doneKeys.add(entryName);
                            return false;
                        }
                        writeOutputStream(false, "PL-BI-1003", entryName);
                        doneKeys.add(entryName);
                        continue;
                    } else if (entryName.endsWith(USERDATA_LINKS_JSON)) {
                        userDataLinks = registJsonLinksUserdata(entryName, this.zipArchiveInputStream);
                        if (userDataLinks == null) {
                            doneKeys.add(entryName);
                            return false;
                        }
                        writeOutputStream(false, "PL-BI-1003", entryName);
                        doneKeys.add(entryName);
                        continue;
                    } else if (userodataDirMatcher.matches() && getFileExtension(entryName).equals(".json")) {
                        userDataCount++;
                        if (!setBulkRequests(entryName, producer, bulkRequests, fileNameMap)) {
                            return false;
                        }
                        doneKeys.add(entryName);
                        if ((userDataCount % bulkSize) == 0
                                && !execBulkRequest(davCmp.getCellId(), bulkRequests, fileNameMap, producer)) {
                            return false;
                        }
                        continue;
                    } else if (!entryName.endsWith("/")) {
                        // Files other than xml/json are invalid here.
                        String message = DcCoreMessageUtils.getMessage("PL-BI-2001");
                        log.info(message + " [" + entryName + "]");
                        writeOutputStream(true, "PL-BI-1004", entryName, message);
                        return false;
                    }
                }
                break;
            case TYPE_DAV_FILE:
                // WebDAV file: register each file under bar/90_contents/{davcol_name}.
                if (!registWebDavFile(entryName, this.zipArchiveInputStream, webdavCols)) {
                    return false;
                }
                break;
            case TYPE_SVC_FILE:
                // Service collection file.
                if (!installSvcCollection(webdavCols, entryName)) {
                    return false;
                }
                break;
            case TYPE_MISMATCH:
                // The entry does not belong to any collection defined in rootprops.
                String message = DcCoreMessageUtils.getMessage("PL-BI-2006");
                log.info(message + " [" + entryName + "]");
                writeOutputStream(true, "PL-BI-1004", entryName, message);
                return false;
            default:
                break;
            }
            writeOutputStream(false, "PL-BI-1003", entryName);
            doneKeys.add(entryName);
        }
        // Flush any remaining bulk requests for the last OData collection.
        if (currentPath != null) {
            if (!execBulkRequest(davCmp.getCellId(), bulkRequests, fileNameMap, producer)) {
                return false;
            }
            if (!createUserdataLinks(producer, userDataLinks)) {
                return false;
            }
            userDataLinks = null;
        }
    } catch (IOException ex) {
        isSuccess = false;
        log.info("IOException: " + ex.getMessage(), ex.fillInStackTrace());
        String message = DcCoreMessageUtils.getMessage("PL-BI-2000");
        writeOutputStream(true, CODE_BAR_INSTALL_FAILED, "", message);
    }
    // Check that the required file (bar/90_contents/{odatacol_name}/00_$metadata.xml) was processed.
    isSuccess = checkNecessaryFile(isSuccess, odataCols, doneKeys);
    return isSuccess;
}
From source file:cz.muni.fi.xklinec.zipstream.Mallory.java
/** * Entry point. //w w w . j av a2 s. c o m * * @param args * @throws FileNotFoundException * @throws IOException * @throws NoSuchFieldException * @throws ClassNotFoundException * @throws NoSuchMethodException * @throws java.lang.InterruptedException * @throws java.lang.CloneNotSupportedException */ public void doMain(String[] args) throws FileNotFoundException, IOException, NoSuchFieldException, ClassNotFoundException, NoSuchMethodException, InterruptedException, CloneNotSupportedException { // command line argument parser CmdLineParser parser = new CmdLineParser(this); // if you have a wider console, you could increase the value; // here 80 is also the default parser.setUsageWidth(80); try { // parse the arguments. parser.parseArgument(args); } catch (CmdLineException e) { // if there's a problem in the command line, // you'll get this exception. this will report // an error message. System.err.println(e.getMessage()); System.err.println("java Mallory [options...] arguments..."); // print the list of available options parser.printUsage(System.err); System.err.println(); // print option sample. This is useful some time System.err.println(" Example: java Mallory " + parser.printExample(ExampleMode.ALL)); return; } if (arguments.size() == 2) { final String a0 = arguments.get(0); final String a1 = arguments.get(1); if (!quiet) System.err .println(String.format("Will use file [%s] as input file and [%s] as output file", a0, a1)); fis = new FileInputStream(a0); fos = new FileOutputStream(a1); } else if (arguments.isEmpty()) { if (!quiet) System.err .println(String.format("Will use file [STDIN] as input file and [STDOUT] as output file")); fis = System.in; fos = System.out; } else { if (!quiet) System.err.println("I do not understand the usage."); return; } if (zipAlign) { System.err.println("WARNING: ZIP Align feature not implemented yet..."); return; } // Deflater to re-compress uncompressed data read from ZIP stream. def = new Deflater(9, true); sentFiles = new HashSet<String>(); // Buffer input stream so input stream is read in chunks bis = new BufferedInputStream(fis); bos = new BufferedOutputStream(fos); // Effective temporary dir - if separate is required if (separateTempDir) { effectiveTempDir = File.createTempFile("temp_apk_dir_", "", new File(TEMP_DIR)); effectiveTempDir.delete(); effectiveTempDir.mkdir(); } else { effectiveTempDir = new File(TEMP_DIR); } // Generate temporary APK filename tempApk = File.createTempFile("temp_apk_", ".apk", effectiveTempDir); if (tempApk.canWrite() == false) { throw new IOException("Temp file is not writable!"); } FileOutputStream tos = new FileOutputStream(tempApk); // What we want here is to read input stream from the socket/pipe // whatever, process it in ZIP logic and simultaneously to copy // all read data to the temporary file - this reminds tee command // logic. This functionality can be found in TeeInputStream. TeeInputStream tis = new TeeInputStream(bis, tos); // Providing tis to ZipArchiveInputStream will copy all read data // to temporary tos file. zip = new ZipArchiveInputStream(tis); // List of all sent files, with data and hashes alMap = new HashMap<String, PostponedEntry>(); // Output stream // If there is defined slow down stream, it is used for user output to // mitigate tampering time gap. OutputStream osToUse = bos; SlowDownStream sdStream = null; if (slowDownStream) { // New slow down output stream with internal pipe buffer 15MB. 
sdStream = new SlowDownStream(osToUse, 15 * 1024 * 1024); // If size of the APK is known, use it to set slow down parameters correctly. if (apkSize > 0) { setSlowDownParams(); } if (!quiet) { System.err.println(String.format("Slown down stream will be used; apkSize=%d buffer=%d timeout=%d", apkSize, slowDownBuffer, slowDownTimeout)); } sdStream.setFlushBufferSize(slowDownBuffer); sdStream.setFlushBufferTimeout(slowDownTimeout); sdStream.start(); osToUse = sdStream; } zop = new ZipArchiveOutputStream(osToUse); zop.setLevel(9); if (!quiet) { System.err.println("Patterns that will be excluded:"); for (String regex : exclude) { System.err.println(" '" + regex + "'"); } System.err.println(); } // Read the archive ZipArchiveEntry ze = zip.getNextZipEntry(); while (ze != null) { ZipExtraField[] extra = ze.getExtraFields(true); byte[] lextra = ze.getLocalFileDataExtra(); UnparseableExtraFieldData uextra = ze.getUnparseableExtraFieldData(); byte[] uextrab = uextra != null ? uextra.getLocalFileDataData() : null; byte[] ex = ze.getExtra(); // ZipArchiveOutputStream.DEFLATED // // Data for entry byte[] byteData = Utils.readAll(zip); byte[] deflData = new byte[0]; int infl = byteData.length; int defl = 0; // If method is deflated, get the raw data (compress again). // Since ZIPStream automatically decompresses deflated files in read(). if (ze.getMethod() == ZipArchiveOutputStream.DEFLATED) { def.reset(); def.setInput(byteData); def.finish(); byte[] deflDataTmp = new byte[byteData.length * 2]; defl = def.deflate(deflDataTmp); deflData = new byte[defl]; System.arraycopy(deflDataTmp, 0, deflData, 0, defl); } if (!quiet) System.err.println(String.format( "ZipEntry: meth=%d " + "size=%010d isDir=%5s " + "compressed=%07d extra=%d lextra=%d uextra=%d ex=%d " + "comment=[%s] " + "dataDesc=%s " + "UTF8=%s " + "infl=%07d defl=%07d " + "name [%s]", ze.getMethod(), ze.getSize(), ze.isDirectory(), ze.getCompressedSize(), extra != null ? extra.length : -1, lextra != null ? lextra.length : -1, uextrab != null ? uextrab.length : -1, ex != null ? ex.length : -1, ze.getComment(), ze.getGeneralPurposeBit().usesDataDescriptor(), ze.getGeneralPurposeBit().usesUTF8ForNames(), infl, defl, ze.getName())); final String curName = ze.getName(); // Store zip entry to the map for later check after the APK is recompiled. // Hashes will be compared with the modified APK files after the process. PostponedEntry al = new PostponedEntry(ze, byteData, deflData); alMap.put(curName, al); // META-INF files should be always on the end of the archive, // thus add postponed files right before them if (isPostponed(ze)) { // Capturing interesting files for us and store for later. // If the file is not interesting, send directly to the stream. if (!quiet) System.err.println(" Interesting file, postpone sending!!!"); } else { // recompute CRC? if (recomputeCrc) { crc.reset(); crc.update(byteData); final long newCrc = crc.getValue(); if (!quiet && ze.getCrc() != newCrc && ze.getCrc() != -1) { System.err.println(" Warning: file CRC mismatch!!! Original: [" + ze.getCrc() + "] real: [" + newCrc + "]"); } ze.setCrc(newCrc); } // Write ZIP entry to the archive zop.putArchiveEntry(ze); // Add file data to the stream zop.write(byteData, 0, infl); zop.closeArchiveEntry(); zop.flush(); // Mark file as sent. addSent(curName); } ze = zip.getNextZipEntry(); } // Flush buffers zop.flush(); fos.flush(); // Cleaning up stuff, all reading streams can be closed now. 
zip.close(); bis.close(); fis.close(); tis.close(); tos.close(); // // APK is finished here, all non-interesting files were sent to the // zop strem (socket to the victim). Now APK transformation will // be performed, diff, sending rest of the files to zop. // boolean doPadding = paddExtra > 0 || outBytes > 0; long flen = tempApk.length(); if (outBytes <= 0) { outBytes = flen + paddExtra; } if (!quiet) { System.err.println("\nAPK reading finished, going to tamper downloaded " + " APK file [" + tempApk.toString() + "]; filezise=[" + flen + "]"); System.err.println(String.format("Sent so far: %d kB in %f %% after adding padding it is %f %%", zop.getWritten() / 1024, 100.0 * (double) zop.getWritten() / (double) flen, 100.0 * (double) zop.getWritten() / ((double) (outBytes > 0 ? outBytes : flen)))); } // New APK was generated, new filename = "tempApk_tampered" newApk = new File(outFile == null ? getFileName(tempApk.getAbsolutePath()) : outFile); if (cmd == null) { // Simulation of doing some evil stuff on the temporary apk Thread.sleep(3000); if (!quiet) System.err.println( "Tampered APK file: " + " [" + newApk.toString() + "]; filezise=[" + newApk.length() + "]"); // // Since no tampering was performed right now we will simulate it by just simple // copying the APK file // FileUtils.copyFile(tempApk, newApk); } else { try { // Execute command String cmd2exec; switch (cmdFormat) { case 0: cmd2exec = cmd + " " + tempApk.getAbsolutePath(); break; case 1: cmd2exec = cmd.replaceAll(INPUT_APK_PLACEHOLDER, tempApk.getAbsolutePath()); break; case 2: cmd2exec = cmd.replaceAll(INPUT_APK_PLACEHOLDER, tempApk.getAbsolutePath()); cmd2exec = cmd2exec.replaceAll(OUTPUT_APK_PLACEHOLDER, newApk.getAbsolutePath()); break; default: throw new IllegalArgumentException("Unknown command format number"); } if (!quiet) { System.err.println("Command to be executed: " + cmd2exec); System.err.println("\n<CMDOUTPUT>"); } long cmdStartTime = System.currentTimeMillis(); CmdExecutionResult resExec = execute(cmd2exec, OutputOpt.EXECUTE_STD_COMBINE, null, quiet ? null : System.err); long cmdStopTime = System.currentTimeMillis(); if (!quiet) { System.err.println("</CMDOUTPUT>\n"); System.err.println("Command executed. Return value: " + resExec.exitValue + "; tamperingTime=" + (cmdStopTime - cmdStartTime)); } } catch (IOException e) { if (!quiet) e.printStackTrace(System.err); } } // // Now read new APK file with ZipInputStream and push new/modified files to the ZOP. // fis = new FileInputStream(newApk); bis = new BufferedInputStream(fis); zip = new ZipArchiveInputStream(bis); // Merge tampered APK to the final, but in this first time // do it to the external buffer in order to get final apk size. // Backup ZOP state to the clonned instance. zop.flush(); long writtenBeforeDiff = zop.getWritten(); ZipArchiveOutputStream zop_back = zop; zop = zop.cloneThis(); // Set temporary byte array output stream, so original output stream is not // touched in this phase. ByteArrayOutputStream bbos = new ByteArrayOutputStream(); zop.setOut(bbos); mergeTamperedApk(false, false); zop.flush(); // Now output stream almost contains APK file, central directory is not written yet. long writtenAfterDiff = zop.getWritten(); if (!quiet) System.err.println(String.format("Tampered apk size yet; writtenBeforeDiff=%d writtenAfterDiff=%d", writtenBeforeDiff, writtenAfterDiff)); // Write central directory header to temporary buffer to discover its size. 
zop.writeFinish(); zop.flush(); bbos.flush(); // Read new values long writtenAfterCentralDir = zop.getWritten(); long centralDirLen = zop.getCdLength(); byte[] buffAfterMerge = bbos.toByteArray(); //int endOfCentralDir = (int) (buffAfterMerge.length - (writtenAfterCentralDir-writtenBeforeDiff)); long endOfCentralDir = END_OF_CENTRAL_DIR_SIZE; // Determine number of bytes to add to APK. // padlen is number of bytes missing in APK to meet desired size in bytes. padlen = doPadding ? (outBytes - (writtenAfterCentralDir + endOfCentralDir)) : 0; // Compute number of files needed for padding. int padfiles = (int) Math.ceil((double) padlen / (double) (PAD_BLOCK_MAX)); if (!quiet) System.err.println(String.format("Remaining to pad=%d, padfiles=%d " + "writtenAfterCentralDir=%d " + "centralDir=%d endOfCentralDir=%d centralDirOffset=%d " + "buffSize=%d total=%d desired=%d ", padlen, padfiles, writtenAfterCentralDir, centralDirLen, endOfCentralDir, zop.getCdOffset(), buffAfterMerge.length, writtenAfterCentralDir + endOfCentralDir, outBytes)); if (padlen < 0) { throw new IllegalStateException("Padlen cannot be negative, please increase padding size"); } // Close input streams for tampered APK try { zip.close(); bis.close(); fis.close(); } catch (Exception e) { if (!quiet) e.printStackTrace(System.err); } // Merge again, now with pre-defined padding size. fis = new FileInputStream(newApk); bis = new BufferedInputStream(fis); zip = new ZipArchiveInputStream(bis); // Revert changes - use clonned writer stream. zop = zop_back; long writtenBeforeDiff2 = zop.getWritten(); // Merge tampered APK, now for real, now with computed padding. mergeTamperedApk(true, true); zop.flush(); long writtenAfterMerge2 = zop.getWritten(); // Finish really zop.finish(); zop.flush(); long writtenReally = zop.getWritten(); long centralDirLen2 = zop.getCdLength(); if (!quiet) System.err.println(String.format( "Write stats; " + "writtenBeforeDiff=%d writtenAfterDiff=%d " + "writtenAfterCentralDir=%d centralDir=%d endOfCd=%d centralDirOffset=%d " + "padlen=%d total=%d desired=%d", writtenBeforeDiff2, writtenAfterMerge2, writtenReally, centralDirLen2, endOfCentralDir, zop.getCdOffset(), padlen, writtenReally + endOfCentralDir, outBytes)); // Will definitelly close (and finish if not yet) ZOP stream // and close underlying stream. zop.close(); if (sdStream != null) { if (!quiet) { System.err.println("Waiting for sdStream to finish..."); } // Wait for stream to finish dumping with pre-set speed, if it takes // too long (1 minute) switch slown down stream to dumping mode // without any waiting. long startedDump = System.currentTimeMillis(); while (sdStream.isRunning()) { long curTime = System.currentTimeMillis(); if (startedDump != -1 && (curTime - startedDump) > 1000 * 120) { startedDump = -1; sdStream.flushPipes(); } Thread.sleep(10); } if (!quiet) { System.err.println("SD stream finished, terminating..."); } } // Should always be same if (!quiet && doPadding && writtenBeforeDiff != writtenBeforeDiff2) { System.err.println(String.format("Warning! Size before merge from pass1 and pass2 does not match.")); } // If size is different, something went wrong. if (!quiet && doPadding && ((writtenReally + endOfCentralDir) != outBytes)) { System.err.println(String.format("Warning! Output size differs from desired size.")); } bos.close(); fos.close(); // Delete temporary files if required if (deleteArtefacts) { try { if (newApk.exists()) { newApk.delete(); if (!quiet) System.err.println("Tampered APK removed. 
" + newApk.getAbsolutePath()); } if (tempApk.exists()) { tempApk.delete(); if (!quiet) System.err.println("Original APK removed. " + tempApk.getAbsolutePath()); } if (separateTempDir && effectiveTempDir.exists()) { FileUtils.deleteDirectory(effectiveTempDir); if (!quiet) System.err.println("Temporary directory removed. " + effectiveTempDir.getAbsolutePath()); } if (!quiet) System.err.println("Temporary files were removed."); } catch (IOException e) { if (!quiet) e.printStackTrace(System.err); } } if (!quiet) System.err.println("THE END!"); }
From source file:com.fujitsu.dc.core.bar.BarFileReadRunner.java
/**
 * Checks whether the structure of a bar file entry is valid.
 * @param zae ZipArchiveEntry
 * @param entryName entry name
 * @param maxSize maximum allowed entry size (MB)
 * @param doneKeys entry names processed so far
 * @return true if the entry is valid
 */
private boolean isValidFileStructure(ZipArchiveEntry zae,
        String entryName, long maxSize, List<String> doneKeys) {
    writeOutputStream(false, CODE_BAR_INSTALL_STARTED, entryName);
    // Check that the entry is one of the defined bar file entries.
    if (!barFileOrder.containsKey(entryName)) {
        log.info("[" + entryName + "] invalid file");
        String message = DcCoreMessageUtils.getMessage("PL-BI-2001");
        writeOutputStream(true, "PL-BI-1004", entryName, message);
        return false;
    }
    // Extract the two-digit index from the entry name.
    Pattern formatPattern = Pattern.compile(".*/+([0-9][0-9])_.*");
    Matcher formatMatcher = formatPattern.matcher(entryName);
    String entryIndex = formatMatcher.replaceAll("$1");
    if (doneKeys.isEmpty()) {
        // The first entry must have index "00" (00_manifest.json).
        if (!entryIndex.equals("00")) {
            log.info("bar/00_meta/00_manifest.json is not exsist");
            String message = DcCoreMessageUtils.getMessage("PL-BI-2001");
            writeOutputStream(true, "PL-BI-1004", entryName, message);
            return false;
        }
    } else {
        String lastEntryName = doneKeys.get(doneKeys.size() - 1);
        formatMatcher = formatPattern.matcher(lastEntryName);
        String lastEntryIndex = formatMatcher.replaceAll("$1");
        // Entries must appear in ascending index order.
        if (entryIndex.compareTo(lastEntryIndex) < 0) {
            log.info("[" + entryName + "] invalid file");
            String message = DcCoreMessageUtils.getMessage("PL-BI-2001");
            writeOutputStream(true, "PL-BI-1004", entryName, message);
            return false;
        }
    }
    // [400] The entry size exceeds the allowed maximum.
    if (zae.getSize() > (long) (maxSize * MB)) {
        log.info("Bar file entry size too large invalid file [" + entryName + "]");
        String message = DcCoreException.BarInstall.BAR_FILE_ENTRY_SIZE_TOO_LARGE
                .params(zae.getName(), String.valueOf(zae.getSize())).getMessage();
        writeOutputStream(true, "PL-BI-1004", entryName, message);
        return false;
    }
    return true;
}
From source file:io.personium.core.bar.BarFileReadRunner.java
/**
 * Checks whether the structure of a bar file entry is valid.
 * @param zae ZipArchiveEntry
 * @param entryName entry name
 * @param maxSize maximum allowed entry size (MB)
 * @param doneKeys entry names processed so far
 * @return true if the entry is valid
 */
private boolean isValidFileStructure(ZipArchiveEntry zae,
        String entryName, long maxSize, List<String> doneKeys) {
    writeOutputStream(false, CODE_BAR_INSTALL_STARTED, entryName);
    // Check that the entry is one of the defined bar file entries.
    if (!barFileOrder.containsKey(entryName)) {
        log.info("[" + entryName + "] invalid file");
        String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2001");
        writeOutputStream(true, "PL-BI-1004", entryName, message);
        return false;
    }
    // Extract the two-digit index from the entry name.
    Pattern formatPattern = Pattern.compile(".*/+([0-9][0-9])_.*");
    Matcher formatMatcher = formatPattern.matcher(entryName);
    String entryIndex = formatMatcher.replaceAll("$1");
    if (doneKeys.isEmpty()) {
        // The first entry must have index "00" (00_manifest.json).
        if (!entryIndex.equals("00")) {
            log.info("bar/00_meta/00_manifest.json is not exsist");
            String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2001");
            writeOutputStream(true, "PL-BI-1004", entryName, message);
            return false;
        }
    } else {
        String lastEntryName = doneKeys.get(doneKeys.size() - 1);
        formatMatcher = formatPattern.matcher(lastEntryName);
        String lastEntryIndex = formatMatcher.replaceAll("$1");
        // Entries must appear in ascending index order.
        if (entryIndex.compareTo(lastEntryIndex) < 0) {
            log.info("[" + entryName + "] invalid file");
            String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2001");
            writeOutputStream(true, "PL-BI-1004", entryName, message);
            return false;
        }
    }
    // [400] The entry size exceeds the allowed maximum.
    if (zae.getSize() > (long) (maxSize * MB)) {
        log.info("Bar file entry size too large invalid file [" + entryName + "]");
        String message = PersoniumCoreException.BarInstall.BAR_FILE_ENTRY_SIZE_TOO_LARGE
                .params(zae.getName(), String.valueOf(zae.getSize())).getMessage();
        writeOutputStream(true, "PL-BI-1004", entryName, message);
        return false;
    }
    return true;
}
From source file:ee.sk.digidoc.factory.SAXDigiDocFactory.java
/** * Reads in a DigiDoc file. One of fname or isSdoc must be given. * @param fname signed doc filename// w w w .j a v a2 s. co m * @param isSdoc opened stream with DigiDoc data * The user must open and close it. * @param errs list of errors to fill with parsing errors. If given * then attempt is made to continue parsing on errors and return them in this list. * If not given (null) then the first error found will be thrown. * @return signed document object if successfully parsed */ private SignedDoc readSignedDocOfType(String fname, InputStream isSdoc, boolean isBdoc, List errs) throws DigiDocException { // Use an instance of ourselves as the SAX event handler SAXDigiDocFactory handler = this; m_errs = errs; DigiDocVerifyFactory.initProvider(); SAXParserFactory factory = SAXParserFactory.newInstance(); if (m_logger.isDebugEnabled()) m_logger.debug("Start reading ddoc/bdoc " + ((fname != null) ? "from file: " + fname : "from stream") + " bdoc: " + isBdoc); if (fname == null && isSdoc == null) { throw new DigiDocException(DigiDocException.ERR_READ_FILE, "No input file", null); } if (fname != null) { File inFile = new File(fname); if (!inFile.canRead() || inFile.length() == 0) { throw new DigiDocException(DigiDocException.ERR_READ_FILE, "Empty or unreadable input file", null); } } ZipFile zf = null; ZipArchiveInputStream zis = null; ZipArchiveEntry ze = null; InputStream isEntry = null; File fTmp = null; try { factory.setFeature("http://xml.org/sax/features/external-general-entities", false); factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false); if (isBdoc) { // bdoc parsing // must be a bdoc document ? m_doc = new SignedDoc(); m_doc.setVersion(SignedDoc.BDOC_VERSION_1_0); m_doc.setFormat(SignedDoc.FORMAT_BDOC); Enumeration eFiles = null; if (fname != null) { zf = new ZipFile(fname, "UTF-8"); eFiles = zf.getEntries(); } else if (isSdoc != null) { zis = new ZipArchiveInputStream(isSdoc, "UTF-8", true, true); } ArrayList lSigFnames = new ArrayList(); ArrayList lDataFnames = new ArrayList(); // read all entries boolean bHasMimetype = false, bManifest1 = false; int nFil = 0; while ((zf != null && eFiles.hasMoreElements()) || (zis != null && ((ze = zis.getNextZipEntry()) != null))) { nFil++; // read entry if (zf != null) { // ZipFile ze = (ZipArchiveEntry) eFiles.nextElement(); isEntry = zf.getInputStream(ze); } else { // ZipArchiveInputStream int n = 0, nTot = 0; if ((ze.getName().equals(FILE_MIMETYPE) || ze.getName().equals(FILE_MANIFEST) || (ze.getName().startsWith(FILE_SIGNATURES) && ze.getName().endsWith(".xml"))) || (nMaxBdocFilCached <= 0 || (ze.getSize() < nMaxBdocFilCached && ze.getSize() >= 0))) { ByteArrayOutputStream bos = new ByteArrayOutputStream(); byte[] data = new byte[2048]; while ((n = zis.read(data)) > 0) { bos.write(data, 0, n); nTot += n; } if (m_logger.isDebugEnabled()) m_logger.debug("Read: " + nTot + " bytes from zip"); data = bos.toByteArray(); bos = null; isEntry = new ByteArrayInputStream(data); } else { File fCacheDir = new File(ConfigManager.instance().getStringProperty( "DIGIDOC_DF_CACHE_DIR", System.getProperty("java.io.tmpdir"))); fTmp = File.createTempFile("bdoc-data", ".tmp", fCacheDir); FileOutputStream fos = new FileOutputStream(fTmp); byte[] data = new byte[2048]; while ((n = zis.read(data)) > 0) { fos.write(data, 0, n); nTot += n; } if (m_logger.isDebugEnabled()) m_logger.debug("Read: " + nTot + " bytes from zip to: " + fTmp.getAbsolutePath()); fos.close(); isEntry = new FileInputStream(fTmp); } } if (m_logger.isDebugEnabled()) 
m_logger.debug("Entry: " + ze.getName() + " nlen: " + ze.getName().length() + " size: " + ze.getSize() + " dir: " + ze.isDirectory() + " comp-size: " + ze.getCompressedSize()); // mimetype file if (ze.getName().equals(FILE_MIMETYPE)) { if (m_logger.isDebugEnabled()) m_logger.debug("Check mimetype!"); checkBdocMimetype(isEntry); bHasMimetype = true; m_doc.setComment(ze.getComment()); if (nFil != 1) { m_logger.error("mimetype file is " + nFil + " file but must be first"); handleError(new DigiDocException(DigiDocException.ERR_DIGIDOC_BADXML, "mimetype file is not first zip entry", null)); } } else if (ze.getName().equals(FILE_MANIFEST)) { // manifest.xml file if (m_logger.isDebugEnabled()) m_logger.debug("Read manifest"); if (!bManifest1 && isEntry != null) { bManifest1 = true; BdocManifestParser mfparser = new BdocManifestParser(m_doc); mfparser.readManifest(isEntry); } else { m_logger.error("Found multiple manifest.xml files!"); throw new DigiDocException(DigiDocException.ERR_MULTIPLE_MANIFEST_FILES, "Found multiple manifest.xml files!", null); } } else if (ze.getName().startsWith(FILE_SIGNATURES) && ze.getName().endsWith(".xml")) { // some signature m_fileName = ze.getName(); if (m_logger.isDebugEnabled()) m_logger.debug("Reading bdoc siganture: " + m_fileName); boolean bExists = false; for (int j = 0; j < lSigFnames.size(); j++) { String s1 = (String) lSigFnames.get(j); if (s1.equals(m_fileName)) bExists = true; } if (bExists) { m_logger.error("Duplicate signature filename: " + m_fileName); handleError(new DigiDocException(DigiDocException.ERR_DIGIDOC_BADXML, "Duplicate signature filename: " + m_fileName, null)); } else lSigFnames.add(m_fileName); SAXParser saxParser = factory.newSAXParser(); ByteArrayOutputStream bos = new ByteArrayOutputStream(); int n = 0; byte[] data = new byte[2048]; while ((n = isEntry.read(data)) > 0) bos.write(data, 0, n); data = bos.toByteArray(); bos = null; if (m_logger.isDebugEnabled()) m_logger.debug( "Parsing bdoc: " + m_fileName + " size: " + ((data != null) ? 
data.length : 0)); saxParser.parse(new SignatureInputStream(new ByteArrayInputStream(data)), this); if (m_logger.isDebugEnabled()) m_logger.debug("Parsed bdoc: " + m_fileName); Signature sig1 = m_doc.getLastSignature(); m_sigComment = ze.getComment(); if (sig1 != null) { sig1.setPath(m_fileName); sig1.setComment(ze.getComment()); } } else { // probably a data file if (m_logger.isDebugEnabled()) m_logger.debug("Read data file: " + ze.getName()); if (!ze.isDirectory()) { boolean bExists = false; for (int j = 0; j < lDataFnames.size(); j++) { String s1 = (String) lDataFnames.get(j); if (s1.equals(ze.getName())) bExists = true; } if (bExists) { m_logger.error("Duplicate datafile filename: " + ze.getName()); handleError(new DigiDocException(DigiDocException.ERR_DIGIDOC_BADXML, "Duplicate datafile filename: " + ze.getName(), null)); } else lDataFnames.add(ze.getName()); DataFile df = m_doc.findDataFileById(ze.getName()); if (df != null) { if (ze.getSize() > 0) df.setSize(ze.getSize()); df.setContentType(DataFile.CONTENT_BINARY); df.setFileName(ze.getName()); } else { df = new DataFile(ze.getName(), DataFile.CONTENT_BINARY, ze.getName(), "application/binary", m_doc); if (m_doc.getDataFiles() == null) m_doc.setDataFiles(new ArrayList()); m_doc.getDataFiles().add(df); //m_doc.addDataFile(df); // this does some intiailization work unnecessary here } // enable caching if requested if (isEntry != null) df.setOrCacheBodyAndCalcHashes(isEntry); df.setComment(ze.getComment()); df.setLastModDt(new Date(ze.getTime())); // fix mime type according to DataObjectFormat Signature sig1 = m_doc.getLastSignature(); if (sig1 != null) { Reference dRef = sig1.getSignedInfo().getReferenceForDataFile(df); if (dRef != null) { DataObjectFormat dof = sig1.getSignedInfo() .getDataObjectFormatForReference(dRef); if (dof != null) { df.setMimeType(dof.getMimeType()); } } } } } if (fTmp != null) { fTmp.delete(); fTmp = null; } } // while zip entries if (!bHasMimetype) { m_logger.error("No mimetype file"); handleError(new DigiDocException(DigiDocException.ERR_DIGIDOC_BADXML, "Not a BDOC format file! 
No mimetype file!", null)); } // if no signatures exist then copy mime-type from manifest.xml to DataFile -s if (m_doc.countSignatures() == 0) { for (int i = 0; i < m_doc.countDataFiles(); i++) { DataFile df = m_doc.getDataFile(i); if (m_doc.getManifest() != null) { for (int j = 0; j < m_doc.getManifest().getNumFileEntries(); j++) { ManifestFileEntry mfe = m_doc.getManifest().getFileEntry(j); if (mfe.getFullPath() != null && mfe.getFullPath().equals(df.getFileName())) { df.setMimeType(mfe.getMediaType()); } // if fullpath } // for } // if } // for i } } else { // ddoc parsing if (m_logger.isDebugEnabled()) m_logger.debug("Reading ddoc: " + fname + " file: " + m_fileName); m_fileName = fname; SAXParser saxParser = factory.newSAXParser(); if (fname != null) saxParser.parse(new SignatureInputStream(new FileInputStream(fname)), this); else if (isSdoc != null) saxParser.parse(isSdoc, this); } } catch (org.xml.sax.SAXParseException ex) { m_logger.error("SAX Error: " + ex); handleError(ex); } catch (Exception ex) { m_logger.error("Error reading3: " + ex); ex.printStackTrace(); /*if(ex instanceof DigiDocException){ DigiDocException dex = (DigiDocException)ex; m_logger.error("Dex: " + ex); if(dex.getNestedException() != null) { dex.getNestedException().printStackTrace(); m_logger.error("Trace: "); } }*/ handleError(ex); } finally { // cleanup try { if (isEntry != null) { isEntry.close(); isEntry = null; } if (zis != null) zis.close(); if (zf != null) zf.close(); if (fTmp != null) { fTmp.delete(); fTmp = null; } } catch (Exception ex) { m_logger.error("Error closing streams and files: " + ex); } } // compare Manifest and DataFiles boolean bErrList = (errs != null); if (errs == null) errs = new ArrayList(); boolean bOk = DigiDocVerifyFactory.verifyManifestEntries(m_doc, errs); if (m_doc == null) { m_logger.error("Error reading4: doc == null"); handleError(new DigiDocException(DigiDocException.ERR_DIGIDOC_BADXML, "This document is not in ddoc or bdoc format", null)); } if (!bErrList && errs.size() > 0) { // if error list was not used then we have to throw exception. So we will throw the first one since we can only do it once DigiDocException ex = (DigiDocException) errs.get(0); throw ex; } return m_doc; }