List of usage examples for org.apache.commons.compress.archivers.zip ZipArchiveEntry isDirectory
public boolean isDirectory()
From source file:at.spardat.xma.xdelta.JarDelta.java
/** * Compute delta.// www . ja v a 2 s . c om * * @param source the source * @param target the target * @param output the output * @param list the list * @param prefix the prefix * @throws IOException Signals that an I/O exception has occurred. */ public void computeDelta(ZipFile source, ZipFile target, ZipArchiveOutputStream output, PrintWriter list, String prefix) throws IOException { try { for (Enumeration<ZipArchiveEntry> enumer = target.getEntries(); enumer.hasMoreElements();) { calculatedDelta = null; ZipArchiveEntry targetEntry = enumer.nextElement(); ZipArchiveEntry sourceEntry = findBestSource(source, target, targetEntry); String nextEntryName = prefix + targetEntry.getName(); if (sourceEntry != null && zipFilesPattern.matcher(sourceEntry.getName()).matches() && !equal(sourceEntry, targetEntry)) { nextEntryName += "!"; } nextEntryName += "|" + Long.toHexString(targetEntry.getCrc()); if (sourceEntry != null) { nextEntryName += ":" + Long.toHexString(sourceEntry.getCrc()); } else { nextEntryName += ":0"; } list.println(nextEntryName); if (targetEntry.isDirectory()) { if (sourceEntry == null) { ZipArchiveEntry outputEntry = entryToNewName(targetEntry, prefix + targetEntry.getName()); output.putArchiveEntry(outputEntry); output.closeArchiveEntry(); } } else { if (sourceEntry == null || sourceEntry.getSize() <= Delta.DEFAULT_CHUNK_SIZE || targetEntry.getSize() <= Delta.DEFAULT_CHUNK_SIZE) { // new Entry od. alter Eintrag od. 
neuer Eintrag leer ZipArchiveEntry outputEntry = entryToNewName(targetEntry, prefix + targetEntry.getName()); output.putArchiveEntry(outputEntry); try (InputStream in = target.getInputStream(targetEntry)) { int read = 0; while (-1 < (read = in.read(buffer))) { output.write(buffer, 0, read); } output.flush(); } output.closeArchiveEntry(); } else { if (!equal(sourceEntry, targetEntry)) { if (zipFilesPattern.matcher(sourceEntry.getName()).matches()) { File embeddedTarget = File.createTempFile("jardelta-tmp", ".zip"); File embeddedSource = File.createTempFile("jardelta-tmp", ".zip"); try (FileOutputStream out = new FileOutputStream(embeddedSource); InputStream in = source.getInputStream(sourceEntry); FileOutputStream out2 = new FileOutputStream(embeddedTarget); InputStream in2 = target.getInputStream(targetEntry)) { int read = 0; while (-1 < (read = in.read(buffer))) { out.write(buffer, 0, read); } out.flush(); read = 0; while (-1 < (read = in2.read(buffer))) { out2.write(buffer, 0, read); } out2.flush(); computeDelta(new ZipFile(embeddedSource), new ZipFile(embeddedTarget), output, list, prefix + sourceEntry.getName() + "!"); } finally { embeddedSource.delete(); embeddedTarget.delete(); } } else { ZipArchiveEntry outputEntry = new ZipArchiveEntry( prefix + targetEntry.getName() + ".gdiff"); outputEntry.setTime(targetEntry.getTime()); outputEntry.setComment("" + targetEntry.getCrc()); output.putArchiveEntry(outputEntry); if (calculatedDelta != null) { output.write(calculatedDelta); output.flush(); } else { try (ByteArrayOutputStream outbytes = new ByteArrayOutputStream()) { Delta d = new Delta(); DiffWriter diffWriter = new GDiffWriter(new DataOutputStream(outbytes)); int sourceSize = (int) sourceEntry.getSize(); byte[] sourceBytes = new byte[sourceSize]; try (InputStream sourceStream = source.getInputStream(sourceEntry)) { for (int erg = sourceStream.read( sourceBytes); erg < sourceBytes.length; erg += sourceStream .read(sourceBytes, erg, sourceBytes.length - erg)) ; 
} d.compute(sourceBytes, target.getInputStream(targetEntry), diffWriter); output.write(outbytes.toByteArray()); } } output.closeArchiveEntry(); } } } } } } finally { source.close(); target.close(); } }
From source file:io.personium.core.bar.BarFileReadRunner.java
/**
 * Installs a bar (box archive) file into the target box.
 * <p>
 * Opens the bar file as a {@link ZipArchiveInputStream}, validates that the archive starts
 * with the root ("bar/") and metadata ("00_meta") directories, processes each metadata entry
 * via {@code createMetadata}, and — once the contents directory is reached — delegates to
 * {@code createContents()}. Progress and outcome codes are reported through
 * {@code writeOutputStream} / {@code progressInfo}; the bar file is deleted on completion.
 */
public void run() {
    boolean isSuccess = true;
    String path = "/" + this.cell.getName() + "/" + boxName + "/";
    log.debug("install target: " + path);
    try {
        // Entry names already handled, used later to detect missing required files.
        List<String> doneKeys = new ArrayList<String>();
        try {
            this.zipArchiveInputStream = new ZipArchiveInputStream(new FileInputStream(barFile));
        } catch (IOException e) {
            throw PersoniumCoreException.Server.FILE_SYSTEM_ERROR.params(e.getMessage());
        }
        // The archive must begin with the root directory ("bar/").
        if (!isRootDir()) {
            String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2001");
            writeOutputStream(true, "PL-BI-1004", ROOT_DIR, message);
            isSuccess = false;
            return;
        }
        // The metadata directory ("00_meta") must follow.
        if (!isMetadataDir()) {
            String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2001");
            writeOutputStream(true, "PL-BI-1004", META_DIR, message);
            isSuccess = false;
            return;
        }
        // Process metadata entries one by one.
        ZipArchiveEntry zae = null;
        try {
            long maxBarEntryFileSize = getMaxBarEntryFileSize();
            Set<String> keyList = barFileOrder.keySet();
            while ((zae = this.zipArchiveInputStream.getNextZipEntry()) != null) {
                String entryName = zae.getName();
                log.debug("Entry Name: " + entryName);
                log.debug("Entry Size: " + zae.getSize());
                log.debug("Entry Compressed Size: " + zae.getCompressedSize());
                // Only files (not directories) count toward install progress.
                if (!zae.isDirectory()) {
                    this.progressInfo.addDelta(1L);
                }
                isSuccess = createMetadata(zae, entryName, maxBarEntryFileSize, keyList, doneKeys);
                if (!isSuccess) {
                    break;
                }
                // Reaching 90_contents ends the metadata phase; contents are handled below.
                if (isContentsDir(zae)) {
                    if (davCmpMap.isEmpty()) {
                        // Contents present but no collections were registered — treat as failure.
                        writeOutputStream(true, "PL-BI-1004", zae.getName());
                        isSuccess = false;
                    } else {
                        writeOutputStream(false, "PL-BI-1003", zae.getName());
                    }
                    doneKeys.add(zae.getName());
                    break;
                }
            }
        } catch (IOException ex) {
            isSuccess = false;
            log.info("IOException: " + ex.getMessage(), ex.fillInStackTrace());
        }
        // Install the 90_contents payload if the metadata phase stopped there successfully.
        if (isSuccess && isContentsDir(zae)) {
            isSuccess = createContents();
        }
        // Verify that every file marked mandatory in barFileOrder was actually processed.
        if (isSuccess) {
            Set<String> filenameList = barFileOrder.keySet();
            for (String filename : filenameList) {
                Boolean isNecessary = barFileOrder.get(filename);
                if (isNecessary && !doneKeys.contains(filename)) {
                    String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2001");
                    writeOutputStream(true, "PL-BI-1004", filename, message);
                    isSuccess = false;
                }
            }
        }
    } catch (Throwable ex) {
        isSuccess = false;
        String message = getErrorMessage(ex);
        log.info("Exception: " + message, ex.fillInStackTrace());
        writeOutputStream(true, "PL-BI-1005", "", message);
    } finally {
        // Always report a terminal status and clean up, whatever happened above.
        if (isSuccess) {
            writeOutputStream(false, CODE_BAR_INSTALL_COMPLETED, this.cell.getUrl() + boxName, "");
            this.progressInfo.setStatus(ProgressInfo.STATUS.COMPLETED);
        } else {
            String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2001");
            writeOutputStream(false, CODE_BAR_INSTALL_FAILED, this.cell.getUrl() + boxName, message);
            this.progressInfo.setStatus(ProgressInfo.STATUS.FAILED);
        }
        this.progressInfo.setEndTime();
        writeToProgressCache(true);
        IOUtils.closeQuietly(this.zipArchiveInputStream);
        if (this.barFile.exists() && !this.barFile.delete()) {
            log.warn("Failed to remove bar file. [" + this.barFile.getAbsolutePath() + "].");
        }
    }
}
From source file:com.fujitsu.dc.core.bar.BarFileReadRunner.java
/**
 * Installs a bar (box archive) file into the target box.
 * <p>
 * Opens the bar file as a {@link ZipArchiveInputStream}, validates that the archive starts
 * with the root ("bar/") and metadata ("00_meta") directories, processes each metadata entry
 * via {@code createMetadata}, and — once the contents directory is reached — delegates to
 * {@code createContents()}. Progress and outcome codes are reported through
 * {@code writeOutputStream} / {@code progressInfo}; the bar file is deleted on completion.
 */
public void run() {
    boolean isSuccess = true;
    String path = "/" + this.cell.getName() + "/" + boxName + "/";
    log.debug("install target: " + path);
    try {
        // Entry names already handled, used later to detect missing required files.
        List<String> doneKeys = new ArrayList<String>();
        try {
            this.zipArchiveInputStream = new ZipArchiveInputStream(new FileInputStream(barFile));
        } catch (IOException e) {
            throw DcCoreException.Server.FILE_SYSTEM_ERROR.params(e.getMessage());
        }
        // The archive must begin with the root directory ("bar/").
        if (!isRootDir()) {
            String message = DcCoreMessageUtils.getMessage("PL-BI-2001");
            writeOutputStream(true, "PL-BI-1004", ROOT_DIR, message);
            isSuccess = false;
            return;
        }
        // The metadata directory ("00_meta") must follow.
        if (!isMetadataDir()) {
            String message = DcCoreMessageUtils.getMessage("PL-BI-2001");
            writeOutputStream(true, "PL-BI-1004", META_DIR, message);
            isSuccess = false;
            return;
        }
        // Process metadata entries one by one.
        ZipArchiveEntry zae = null;
        try {
            long maxBarEntryFileSize = getMaxBarEntryFileSize();
            Set<String> keyList = barFileOrder.keySet();
            while ((zae = this.zipArchiveInputStream.getNextZipEntry()) != null) {
                String entryName = zae.getName();
                log.debug("Entry Name: " + entryName);
                log.debug("Entry Size: " + zae.getSize());
                log.debug("Entry Compressed Size: " + zae.getCompressedSize());
                // Only files (not directories) count toward install progress.
                if (!zae.isDirectory()) {
                    this.progressInfo.addDelta(1L);
                }
                isSuccess = createMetadata(zae, entryName, maxBarEntryFileSize, keyList, doneKeys);
                if (!isSuccess) {
                    break;
                }
                // Reaching 90_contents ends the metadata phase; contents are handled below.
                if (isContentsDir(zae)) {
                    if (davCmpMap.isEmpty()) {
                        // Contents present but no collections were registered — treat as failure.
                        writeOutputStream(true, "PL-BI-1004", zae.getName());
                        isSuccess = false;
                    } else {
                        writeOutputStream(false, "PL-BI-1003", zae.getName());
                    }
                    doneKeys.add(zae.getName());
                    break;
                }
            }
        } catch (IOException ex) {
            isSuccess = false;
            log.info("IOException: " + ex.getMessage(), ex.fillInStackTrace());
        }
        // Install the 90_contents payload if the metadata phase stopped there successfully.
        if (isSuccess && isContentsDir(zae)) {
            isSuccess = createContents();
        }
        // Verify that every file marked mandatory in barFileOrder was actually processed.
        if (isSuccess) {
            Set<String> filenameList = barFileOrder.keySet();
            for (String filename : filenameList) {
                Boolean isNecessary = barFileOrder.get(filename);
                if (isNecessary && !doneKeys.contains(filename)) {
                    String message = DcCoreMessageUtils.getMessage("PL-BI-2001");
                    writeOutputStream(true, "PL-BI-1004", filename, message);
                    isSuccess = false;
                }
            }
        }
    } catch (Throwable ex) {
        isSuccess = false;
        String message = getErrorMessage(ex);
        log.info("Exception: " + message, ex.fillInStackTrace());
        writeOutputStream(true, "PL-BI-1005", "", message);
    } finally {
        // Always report a terminal status and clean up, whatever happened above.
        if (isSuccess) {
            writeOutputStream(false, CODE_BAR_INSTALL_COMPLETED, this.cell.getUrl() + boxName, "");
            this.progressInfo.setStatus(ProgressInfo.STATUS.COMPLETED);
        } else {
            String message = DcCoreMessageUtils.getMessage("PL-BI-2001");
            writeOutputStream(false, CODE_BAR_INSTALL_FAILED, this.cell.getUrl() + boxName, message);
            this.progressInfo.setStatus(ProgressInfo.STATUS.FAILED);
        }
        this.progressInfo.setEndTime();
        writeToProgressCache(true);
        IOUtils.closeQuietly(this.zipArchiveInputStream);
        if (this.barFile.exists() && !this.barFile.delete()) {
            log.warn("Failed to remove bar file. [" + this.barFile.getAbsolutePath() + "].");
        }
    }
}
From source file:cz.muni.fi.xklinec.zipstream.Mallory.java
/**
 * Entry point: intercepts an APK stream, tampers with it, and re-streams the result.
 * <p>
 * Reads a ZIP/APK from the configured input (file or stdin), tee-ing all bytes to a
 * temporary file while forwarding non-"interesting" entries straight to the output
 * ZIP stream. It then runs an external tampering command (or simulates one by copying),
 * merges the modified APK back into the output in two passes — the first pass into an
 * in-memory buffer only to measure the final size so padding can be computed — and
 * finally pads the archive so the output matches the expected byte size.
 *
 * @param args command-line arguments, parsed by args4j
 * @throws FileNotFoundException if an input/output file cannot be opened
 * @throws IOException on stream errors
 * @throws NoSuchFieldException reflection-related failure (declared by callees)
 * @throws ClassNotFoundException reflection-related failure (declared by callees)
 * @throws NoSuchMethodException reflection-related failure (declared by callees)
 * @throws java.lang.InterruptedException if sleeping/waiting is interrupted
 * @throws java.lang.CloneNotSupportedException if cloning the output stream fails
 */
public void doMain(String[] args) throws FileNotFoundException, IOException, NoSuchFieldException,
        ClassNotFoundException, NoSuchMethodException, InterruptedException, CloneNotSupportedException {
    // command line argument parser
    CmdLineParser parser = new CmdLineParser(this);
    // if you have a wider console, you could increase the value; 80 is the default
    parser.setUsageWidth(80);
    try {
        parser.parseArgument(args);
    } catch (CmdLineException e) {
        // Report the problem and show usage plus an example invocation.
        System.err.println(e.getMessage());
        System.err.println("java Mallory [options...] arguments...");
        parser.printUsage(System.err);
        System.err.println();
        System.err.println(" Example: java Mallory " + parser.printExample(ExampleMode.ALL));
        return;
    }
    // Choose input/output: two file arguments, or stdin/stdout when none given.
    if (arguments.size() == 2) {
        final String a0 = arguments.get(0);
        final String a1 = arguments.get(1);
        if (!quiet)
            System.err.println(String.format("Will use file [%s] as input file and [%s] as output file", a0, a1));
        fis = new FileInputStream(a0);
        fos = new FileOutputStream(a1);
    } else if (arguments.isEmpty()) {
        if (!quiet)
            System.err.println(String.format("Will use file [STDIN] as input file and [STDOUT] as output file"));
        fis = System.in;
        fos = System.out;
    } else {
        if (!quiet)
            System.err.println("I do not understand the usage.");
        return;
    }
    if (zipAlign) {
        System.err.println("WARNING: ZIP Align feature not implemented yet...");
        return;
    }
    // Deflater to re-compress uncompressed data read from the ZIP stream.
    def = new Deflater(9, true);
    sentFiles = new HashSet<String>();
    // Buffer streams so input is read in chunks.
    bis = new BufferedInputStream(fis);
    bos = new BufferedOutputStream(fos);
    // Effective temporary dir - if a separate one is required.
    if (separateTempDir) {
        effectiveTempDir = File.createTempFile("temp_apk_dir_", "", new File(TEMP_DIR));
        effectiveTempDir.delete();
        effectiveTempDir.mkdir();
    } else {
        effectiveTempDir = new File(TEMP_DIR);
    }
    // Generate temporary APK filename.
    tempApk = File.createTempFile("temp_apk_", ".apk", effectiveTempDir);
    if (tempApk.canWrite() == false) {
        throw new IOException("Temp file is not writable!");
    }
    FileOutputStream tos = new FileOutputStream(tempApk);
    // Tee: everything read from the incoming stream is simultaneously copied to tempApk.
    TeeInputStream tis = new TeeInputStream(bis, tos);
    zip = new ZipArchiveInputStream(tis);
    // Map of all seen entries, with data and hashes, keyed by entry name.
    alMap = new HashMap<String, PostponedEntry>();
    // Optional slow-down wrapper on the output to mask the tampering time gap.
    OutputStream osToUse = bos;
    SlowDownStream sdStream = null;
    if (slowDownStream) {
        // Slow-down output stream with a 15MB internal pipe buffer.
        sdStream = new SlowDownStream(osToUse, 15 * 1024 * 1024);
        // If the APK size is known, derive slow-down parameters from it.
        if (apkSize > 0) {
            setSlowDownParams();
        }
        if (!quiet) {
            System.err.println(String.format("Slown down stream will be used; apkSize=%d buffer=%d timeout=%d",
                    apkSize, slowDownBuffer, slowDownTimeout));
        }
        sdStream.setFlushBufferSize(slowDownBuffer);
        sdStream.setFlushBufferTimeout(slowDownTimeout);
        sdStream.start();
        osToUse = sdStream;
    }
    zop = new ZipArchiveOutputStream(osToUse);
    zop.setLevel(9);
    if (!quiet) {
        System.err.println("Patterns that will be excluded:");
        for (String regex : exclude) {
            System.err.println(" '" + regex + "'");
        }
        System.err.println();
    }
    // Read the archive entry by entry.
    ZipArchiveEntry ze = zip.getNextZipEntry();
    while (ze != null) {
        ZipExtraField[] extra = ze.getExtraFields(true);
        byte[] lextra = ze.getLocalFileDataExtra();
        UnparseableExtraFieldData uextra = ze.getUnparseableExtraFieldData();
        byte[] uextrab = uextra != null ? uextra.getLocalFileDataData() : null;
        byte[] ex = ze.getExtra();
        // Entry payload (ZipArchiveInputStream returns decompressed bytes).
        byte[] byteData = Utils.readAll(zip);
        byte[] deflData = new byte[0];
        int infl = byteData.length;
        int defl = 0;
        // If the entry method is DEFLATED, recompress to recover the raw (compressed) form.
        if (ze.getMethod() == ZipArchiveOutputStream.DEFLATED) {
            def.reset();
            def.setInput(byteData);
            def.finish();
            byte[] deflDataTmp = new byte[byteData.length * 2];
            defl = def.deflate(deflDataTmp);
            deflData = new byte[defl];
            System.arraycopy(deflDataTmp, 0, deflData, 0, defl);
        }
        if (!quiet)
            System.err.println(String.format(
                    "ZipEntry: meth=%d " + "size=%010d isDir=%5s "
                            + "compressed=%07d extra=%d lextra=%d uextra=%d ex=%d " + "comment=[%s] "
                            + "dataDesc=%s " + "UTF8=%s " + "infl=%07d defl=%07d " + "name [%s]",
                    ze.getMethod(), ze.getSize(), ze.isDirectory(), ze.getCompressedSize(),
                    extra != null ? extra.length : -1, lextra != null ? lextra.length : -1,
                    uextrab != null ? uextrab.length : -1, ex != null ? ex.length : -1, ze.getComment(),
                    ze.getGeneralPurposeBit().usesDataDescriptor(),
                    ze.getGeneralPurposeBit().usesUTF8ForNames(), infl, defl, ze.getName()));
        final String curName = ze.getName();
        // Remember every entry so hashes can be compared after the APK is recompiled.
        PostponedEntry al = new PostponedEntry(ze, byteData, deflData);
        alMap.put(curName, al);
        // META-INF files come last in the archive, so postponed files are inserted before them.
        if (isPostponed(ze)) {
            // Interesting file: keep for later, do not forward yet.
            if (!quiet)
                System.err.println(" Interesting file, postpone sending!!!");
        } else {
            // Optionally recompute the CRC over the payload and warn on mismatch.
            if (recomputeCrc) {
                crc.reset();
                crc.update(byteData);
                final long newCrc = crc.getValue();
                if (!quiet && ze.getCrc() != newCrc && ze.getCrc() != -1) {
                    System.err.println(" Warning: file CRC mismatch!!! Original: [" + ze.getCrc() + "] real: ["
                            + newCrc + "]");
                }
                ze.setCrc(newCrc);
            }
            // Forward the entry straight to the victim-facing stream.
            zop.putArchiveEntry(ze);
            zop.write(byteData, 0, infl);
            zop.closeArchiveEntry();
            zop.flush();
            // Mark file as sent.
            addSent(curName);
        }
        ze = zip.getNextZipEntry();
    }
    // Flush buffers; all reading streams can be closed now.
    zop.flush();
    fos.flush();
    zip.close();
    bis.close();
    fis.close();
    tis.close();
    tos.close();
    // The whole APK is now in tempApk; non-interesting entries were already forwarded.
    boolean doPadding = paddExtra > 0 || outBytes > 0;
    long flen = tempApk.length();
    if (outBytes <= 0) {
        outBytes = flen + paddExtra;
    }
    if (!quiet) {
        System.err.println("\nAPK reading finished, going to tamper downloaded " + " APK file ["
                + tempApk.toString() + "]; filezise=[" + flen + "]");
        System.err.println(String.format("Sent so far: %d kB in %f %% after adding padding it is %f %%",
                zop.getWritten() / 1024, 100.0 * (double) zop.getWritten() / (double) flen,
                100.0 * (double) zop.getWritten() / ((double) (outBytes > 0 ? outBytes : flen))));
    }
    // Destination for the tampered APK.
    newApk = new File(outFile == null ? getFileName(tempApk.getAbsolutePath()) : outFile);
    if (cmd == null) {
        // No external command: simulate tampering with a delay and a plain copy.
        Thread.sleep(3000);
        if (!quiet)
            System.err.println("Tampered APK file: " + " [" + newApk.toString() + "]; filezise=["
                    + newApk.length() + "]");
        FileUtils.copyFile(tempApk, newApk);
    } else {
        try {
            // Build the external command according to the configured placeholder format.
            String cmd2exec;
            switch (cmdFormat) {
            case 0:
                cmd2exec = cmd + " " + tempApk.getAbsolutePath();
                break;
            case 1:
                cmd2exec = cmd.replaceAll(INPUT_APK_PLACEHOLDER, tempApk.getAbsolutePath());
                break;
            case 2:
                cmd2exec = cmd.replaceAll(INPUT_APK_PLACEHOLDER, tempApk.getAbsolutePath());
                cmd2exec = cmd2exec.replaceAll(OUTPUT_APK_PLACEHOLDER, newApk.getAbsolutePath());
                break;
            default:
                throw new IllegalArgumentException("Unknown command format number");
            }
            if (!quiet) {
                System.err.println("Command to be executed: " + cmd2exec);
                System.err.println("\n<CMDOUTPUT>");
            }
            long cmdStartTime = System.currentTimeMillis();
            CmdExecutionResult resExec = execute(cmd2exec, OutputOpt.EXECUTE_STD_COMBINE, null,
                    quiet ? null : System.err);
            long cmdStopTime = System.currentTimeMillis();
            if (!quiet) {
                System.err.println("</CMDOUTPUT>\n");
                System.err.println("Command executed. Return value: " + resExec.exitValue + "; tamperingTime="
                        + (cmdStopTime - cmdStartTime));
            }
        } catch (IOException e) {
            if (!quiet)
                e.printStackTrace(System.err);
        }
    }
    // Re-read the tampered APK and push new/modified files to the output.
    fis = new FileInputStream(newApk);
    bis = new BufferedInputStream(fis);
    zip = new ZipArchiveInputStream(bis);
    // Pass 1: merge into an in-memory buffer only, to learn the final APK size.
    // Back up the ZOP state in a cloned instance first.
    zop.flush();
    long writtenBeforeDiff = zop.getWritten();
    ZipArchiveOutputStream zop_back = zop;
    zop = zop.cloneThis();
    // Temporary byte sink so the real output stream is untouched during this phase.
    ByteArrayOutputStream bbos = new ByteArrayOutputStream();
    zop.setOut(bbos);
    mergeTamperedApk(false, false);
    zop.flush();
    // The buffer now holds almost the whole APK; the central directory is not written yet.
    long writtenAfterDiff = zop.getWritten();
    if (!quiet)
        System.err.println(String.format("Tampered apk size yet; writtenBeforeDiff=%d writtenAfterDiff=%d",
                writtenBeforeDiff, writtenAfterDiff));
    // Write the central directory to the buffer to discover its size.
    zop.writeFinish();
    zop.flush();
    bbos.flush();
    long writtenAfterCentralDir = zop.getWritten();
    long centralDirLen = zop.getCdLength();
    byte[] buffAfterMerge = bbos.toByteArray();
    long endOfCentralDir = END_OF_CENTRAL_DIR_SIZE;
    // padlen = bytes still missing to reach the desired total output size.
    padlen = doPadding ? (outBytes - (writtenAfterCentralDir + endOfCentralDir)) : 0;
    // Number of padding files needed.
    int padfiles = (int) Math.ceil((double) padlen / (double) (PAD_BLOCK_MAX));
    if (!quiet)
        System.err.println(String.format(
                "Remaining to pad=%d, padfiles=%d " + "writtenAfterCentralDir=%d "
                        + "centralDir=%d endOfCentralDir=%d centralDirOffset=%d "
                        + "buffSize=%d total=%d desired=%d ",
                padlen, padfiles, writtenAfterCentralDir, centralDirLen, endOfCentralDir, zop.getCdOffset(),
                buffAfterMerge.length, writtenAfterCentralDir + endOfCentralDir, outBytes));
    if (padlen < 0) {
        throw new IllegalStateException("Padlen cannot be negative, please increase padding size");
    }
    // Close input streams for the tampered APK before the second pass.
    try {
        zip.close();
        bis.close();
        fis.close();
    } catch (Exception e) {
        if (!quiet)
            e.printStackTrace(System.err);
    }
    // Pass 2: merge for real, with the pre-computed padding, to the real output stream.
    fis = new FileInputStream(newApk);
    bis = new BufferedInputStream(fis);
    zip = new ZipArchiveInputStream(bis);
    // Revert to the backed-up writer stream.
    zop = zop_back;
    long writtenBeforeDiff2 = zop.getWritten();
    mergeTamperedApk(true, true);
    zop.flush();
    long writtenAfterMerge2 = zop.getWritten();
    zop.finish();
    zop.flush();
    long writtenReally = zop.getWritten();
    long centralDirLen2 = zop.getCdLength();
    if (!quiet)
        System.err.println(String.format(
                "Write stats; " + "writtenBeforeDiff=%d writtenAfterDiff=%d "
                        + "writtenAfterCentralDir=%d centralDir=%d endOfCd=%d centralDirOffset=%d "
                        + "padlen=%d total=%d desired=%d",
                writtenBeforeDiff2, writtenAfterMerge2, writtenReally, centralDirLen2, endOfCentralDir,
                zop.getCdOffset(), padlen, writtenReally + endOfCentralDir, outBytes));
    // Definitely close (and finish if not yet) the ZOP stream and its underlying stream.
    zop.close();
    if (sdStream != null) {
        if (!quiet) {
            System.err.println("Waiting for sdStream to finish...");
        }
        // Wait for the stream to drain at the configured speed; after ~2 minutes,
        // switch the slow-down stream to plain dumping without delays.
        long startedDump = System.currentTimeMillis();
        while (sdStream.isRunning()) {
            long curTime = System.currentTimeMillis();
            if (startedDump != -1 && (curTime - startedDump) > 1000 * 120) {
                startedDump = -1;
                sdStream.flushPipes();
            }
            Thread.sleep(10);
        }
        if (!quiet) {
            System.err.println("SD stream finished, terminating...");
        }
    }
    // Sanity checks: pass1 and pass2 pre-merge sizes should match, and the final
    // size should equal the desired output size.
    if (!quiet && doPadding && writtenBeforeDiff != writtenBeforeDiff2) {
        System.err.println(String.format("Warning! Size before merge from pass1 and pass2 does not match."));
    }
    if (!quiet && doPadding && ((writtenReally + endOfCentralDir) != outBytes)) {
        System.err.println(String.format("Warning! Output size differs from desired size."));
    }
    bos.close();
    fos.close();
    // Delete temporary artefacts if required.
    if (deleteArtefacts) {
        try {
            if (newApk.exists()) {
                newApk.delete();
                if (!quiet)
                    System.err.println("Tampered APK removed. " + newApk.getAbsolutePath());
            }
            if (tempApk.exists()) {
                tempApk.delete();
                if (!quiet)
                    System.err.println("Original APK removed. " + tempApk.getAbsolutePath());
            }
            if (separateTempDir && effectiveTempDir.exists()) {
                FileUtils.deleteDirectory(effectiveTempDir);
                if (!quiet)
                    System.err.println("Temporary directory removed. " + effectiveTempDir.getAbsolutePath());
            }
            if (!quiet)
                System.err.println("Temporary files were removed.");
        } catch (IOException e) {
            if (!quiet)
                e.printStackTrace(System.err);
        }
    }
    if (!quiet)
        System.err.println("THE END!");
}
From source file:io.personium.core.bar.BarFileReadRunner.java
/**
 * Installs the bar/90_contents payload, one entry at a time.
 * <p>
 * Classifies each remaining zip entry as OData collection content, WebDAV file, or
 * service-collection file and registers it accordingly. OData user data is batched into
 * bulk requests flushed every {@code bulkSize} items and whenever the entry path leaves
 * the current collection; $links are registered after the collection's data.
 *
 * @return {@code true} if every contents entry was installed successfully
 */
protected boolean createContents() {
    boolean isSuccess = true;
    // Collections of each type registered during the metadata phase, keyed by path.
    Map<String, DavCmp> odataCols = getCollections(DavCmp.TYPE_COL_ODATA);
    Map<String, DavCmp> webdavCols = getCollections(DavCmp.TYPE_COL_WEBDAV);
    Map<String, DavCmp> serviceCols = getCollections(DavCmp.TYPE_COL_SVC);
    DavCmp davCmp = null;
    List<String> doneKeys = new ArrayList<String>();
    try {
        ZipArchiveEntry zae = null;
        String currentPath = null;
        int userDataCount = 0;
        List<JSONMappedObject> userDataLinks = new ArrayList<JSONMappedObject>();
        LinkedHashMap<String, BulkRequest> bulkRequests = new LinkedHashMap<String, BulkRequest>();
        Map<String, String> fileNameMap = new HashMap<String, String>();
        PersoniumODataProducer producer = null;
        while ((zae = this.zipArchiveInputStream.getNextZipEntry()) != null) {
            String entryName = zae.getName();
            log.debug("Entry Name: " + entryName);
            log.debug("Entry Size: " + zae.getSize());
            log.debug("Entry Compressed Size: " + zae.getCompressedSize());
            // Only files (not directories) count toward install progress.
            if (!zae.isDirectory()) {
                this.progressInfo.addDelta(1L);
            }
            writeOutputStream(false, CODE_BAR_INSTALL_STARTED, entryName);
            // Leaving the current OData data directory: flush pending bulk requests
            // and register the accumulated user-data links before moving on.
            if (currentPath != null && !entryName.startsWith(currentPath)) {
                if (!execBulkRequest(davCmp.getCell().getId(), bulkRequests, fileNameMap, producer)) {
                    return false;
                }
                if (!createUserdataLinks(producer, userDataLinks)) {
                    return false;
                }
                userDataLinks = new ArrayList<JSONMappedObject>();
                currentPath = null;
            }
            int entryType = getEntryType(entryName, odataCols, webdavCols, serviceCols, this.davFileMap);
            switch (entryType) {
            case TYPE_ODATA_COLLECTION:
                // OData collection content.
                if (!odataCols.isEmpty()) {
                    if (!isValidODataContents(entryName, odataCols, doneKeys)) {
                        return false;
                    }
                    Pattern formatPattern = Pattern.compile(CONTENTS_DIR + ".+/90_data/");
                    Matcher formatMatcher = formatPattern.matcher(entryName);
                    if (formatMatcher.matches()) {
                        currentPath = entryName;
                    }
                    Pattern userodataDirPattern = Pattern.compile(CONTENTS_DIR + ".+/90_data/.+");
                    Matcher userodataDirMatcher = userodataDirPattern.matcher(entryName);
                    if (getFileExtension(entryName).equals(".xml")) {
                        // 00_$metadata.xml: register the user schema for this collection.
                        davCmp = getCollection(entryName, odataCols);
                        producer = davCmp.getODataProducer();
                        if (!registUserSchema(entryName, this.zipArchiveInputStream, davCmp)) {
                            doneKeys.add(entryName);
                            return false;
                        }
                        writeOutputStream(false, "PL-BI-1003", entryName);
                        doneKeys.add(entryName);
                        continue;
                    } else if (entryName.endsWith(USERDATA_LINKS_JSON)) {
                        // User-data $links definition file.
                        userDataLinks = registJsonLinksUserdata(entryName, this.zipArchiveInputStream);
                        if (userDataLinks == null) {
                            doneKeys.add(entryName);
                            return false;
                        }
                        writeOutputStream(false, "PL-BI-1003", entryName);
                        doneKeys.add(entryName);
                        continue;
                    } else if (userodataDirMatcher.matches() && getFileExtension(entryName).equals(".json")) {
                        // User data: batch into bulk requests, flushing every bulkSize items.
                        userDataCount++;
                        if (!setBulkRequests(entryName, producer, bulkRequests, fileNameMap)) {
                            return false;
                        }
                        doneKeys.add(entryName);
                        if ((userDataCount % bulkSize) == 0 && !execBulkRequest(davCmp.getCell().getId(),
                                bulkRequests, fileNameMap, producer)) {
                            return false;
                        }
                        continue;
                    } else if (!entryName.endsWith("/")) {
                        // Anything that is not xml/json (and not a directory) is invalid here.
                        String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2001");
                        log.info(message + " [" + entryName + "]");
                        writeOutputStream(true, "PL-BI-1004", entryName, message);
                        return false;
                    }
                }
                break;
            case TYPE_DAV_FILE:
                // WebDAV file under bar/90_contents/{davcol_name}: register it.
                if (!registWebDavFile(entryName, this.zipArchiveInputStream, webdavCols)) {
                    return false;
                }
                break;
            case TYPE_SVC_FILE:
                // Service-collection source file.
                if (!installSvcCollection(webdavCols, entryName)) {
                    return false;
                }
                break;
            case TYPE_MISMATCH:
                // Entry does not match any collection declared in rootprops.
                String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2006");
                log.info(message + " [" + entryName + "]");
                writeOutputStream(true, "PL-BI-1004", entryName, message);
                return false;
            default:
                break;
            }
            writeOutputStream(false, "PL-BI-1003", entryName);
            doneKeys.add(entryName);
        }
        // Archive ended while still inside an OData collection: flush the remainder.
        if (currentPath != null) {
            if (!execBulkRequest(davCmp.getCell().getId(), bulkRequests, fileNameMap, producer)) {
                return false;
            }
            if (!createUserdataLinks(producer, userDataLinks)) {
                return false;
            }
            userDataLinks = null;
        }
    } catch (IOException ex) {
        isSuccess = false;
        log.info("IOException: " + ex.getMessage(), ex.fillInStackTrace());
        String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2000");
        writeOutputStream(true, CODE_BAR_INSTALL_FAILED, "", message);
    }
    // Verify required files (e.g. bar/90_contents/{odatacol_name}/00_$metadata.xml) were seen.
    isSuccess = checkNecessaryFile(isSuccess, odataCols, doneKeys);
    return isSuccess;
}
From source file:com.fujitsu.dc.core.bar.BarFileReadRunner.java
/**
 * Installs the bar/90_contents payload, one entry at a time.
 * <p>
 * Classifies each remaining zip entry as OData collection content, WebDAV file, or
 * service-collection file and registers it accordingly. OData user data is batched into
 * bulk requests flushed every {@code bulkSize} items and whenever the entry path leaves
 * the current collection; $links are registered after the collection's data.
 *
 * @return {@code true} if every contents entry was installed successfully
 */
protected boolean createContents() {
    boolean isSuccess = true;
    // Collections of each type registered during the metadata phase, keyed by path.
    Map<String, DavCmpEsImpl> odataCols = getCollections(DavCmp.TYPE_COL_ODATA);
    Map<String, DavCmpEsImpl> webdavCols = getCollections(DavCmp.TYPE_COL_WEBDAV);
    Map<String, DavCmpEsImpl> serviceCols = getCollections(DavCmp.TYPE_COL_SVC);
    DavCmpEsImpl davCmp = null;
    List<String> doneKeys = new ArrayList<String>();
    try {
        ZipArchiveEntry zae = null;
        String currentPath = null;
        int userDataCount = 0;
        List<JSONMappedObject> userDataLinks = new ArrayList<JSONMappedObject>();
        LinkedHashMap<String, BulkRequest> bulkRequests = new LinkedHashMap<String, BulkRequest>();
        Map<String, String> fileNameMap = new HashMap<String, String>();
        DcODataProducer producer = null;
        while ((zae = this.zipArchiveInputStream.getNextZipEntry()) != null) {
            String entryName = zae.getName();
            log.debug("Entry Name: " + entryName);
            log.debug("Entry Size: " + zae.getSize());
            log.debug("Entry Compressed Size: " + zae.getCompressedSize());
            // Only files (not directories) count toward install progress.
            if (!zae.isDirectory()) {
                this.progressInfo.addDelta(1L);
            }
            writeOutputStream(false, CODE_BAR_INSTALL_STARTED, entryName);
            // Leaving the current OData data directory: flush pending bulk requests
            // and register the accumulated user-data links before moving on.
            if (currentPath != null && !entryName.startsWith(currentPath)) {
                if (!execBulkRequest(davCmp.getCellId(), bulkRequests, fileNameMap, producer)) {
                    return false;
                }
                if (!createUserdataLinks(producer, userDataLinks)) {
                    return false;
                }
                userDataLinks = new ArrayList<JSONMappedObject>();
                currentPath = null;
            }
            int entryType = getEntryType(entryName, odataCols, webdavCols, serviceCols, this.davFileMap);
            switch (entryType) {
            case TYPE_ODATA_COLLECTION:
                // OData collection content.
                if (!odataCols.isEmpty()) {
                    if (!isValidODataContents(entryName, odataCols, doneKeys)) {
                        return false;
                    }
                    Pattern formatPattern = Pattern.compile(CONTENTS_DIR + ".+/90_data/");
                    Matcher formatMatcher = formatPattern.matcher(entryName);
                    if (formatMatcher.matches()) {
                        currentPath = entryName;
                    }
                    Pattern userodataDirPattern = Pattern.compile(CONTENTS_DIR + ".+/90_data/.+");
                    Matcher userodataDirMatcher = userodataDirPattern.matcher(entryName);
                    if (getFileExtension(entryName).equals(".xml")) {
                        // 00_$metadata.xml: register the user schema for this collection.
                        davCmp = getCollection(entryName, odataCols);
                        producer = davCmp.getODataProducer();
                        if (!registUserSchema(entryName, this.zipArchiveInputStream, davCmp)) {
                            doneKeys.add(entryName);
                            return false;
                        }
                        writeOutputStream(false, "PL-BI-1003", entryName);
                        doneKeys.add(entryName);
                        continue;
                    } else if (entryName.endsWith(USERDATA_LINKS_JSON)) {
                        // User-data $links definition file.
                        userDataLinks = registJsonLinksUserdata(entryName, this.zipArchiveInputStream);
                        if (userDataLinks == null) {
                            doneKeys.add(entryName);
                            return false;
                        }
                        writeOutputStream(false, "PL-BI-1003", entryName);
                        doneKeys.add(entryName);
                        continue;
                    } else if (userodataDirMatcher.matches() && getFileExtension(entryName).equals(".json")) {
                        // User data: batch into bulk requests, flushing every bulkSize items.
                        userDataCount++;
                        if (!setBulkRequests(entryName, producer, bulkRequests, fileNameMap)) {
                            return false;
                        }
                        doneKeys.add(entryName);
                        if ((userDataCount % bulkSize) == 0 && !execBulkRequest(davCmp.getCellId(),
                                bulkRequests, fileNameMap, producer)) {
                            return false;
                        }
                        continue;
                    } else if (!entryName.endsWith("/")) {
                        // Anything that is not xml/json (and not a directory) is invalid here.
                        String message = DcCoreMessageUtils.getMessage("PL-BI-2001");
                        log.info(message + " [" + entryName + "]");
                        writeOutputStream(true, "PL-BI-1004", entryName, message);
                        return false;
                    }
                }
                break;
            case TYPE_DAV_FILE:
                // WebDAV file under bar/90_contents/{davcol_name}: register it.
                if (!registWebDavFile(entryName, this.zipArchiveInputStream, webdavCols)) {
                    return false;
                }
                break;
            case TYPE_SVC_FILE:
                // Service-collection source file.
                if (!installSvcCollection(webdavCols, entryName)) {
                    return false;
                }
                break;
            case TYPE_MISMATCH:
                // Entry does not match any collection declared in rootprops.
                String message = DcCoreMessageUtils.getMessage("PL-BI-2006");
                log.info(message + " [" + entryName + "]");
                writeOutputStream(true, "PL-BI-1004", entryName, message);
                return false;
            default:
                break;
            }
            writeOutputStream(false, "PL-BI-1003", entryName);
            doneKeys.add(entryName);
        }
        // Archive ended while still inside an OData collection: flush the remainder.
        if (currentPath != null) {
            if (!execBulkRequest(davCmp.getCellId(), bulkRequests, fileNameMap, producer)) {
                return false;
            }
            if (!createUserdataLinks(producer, userDataLinks)) {
                return false;
            }
            userDataLinks = null;
        }
    } catch (IOException ex) {
        isSuccess = false;
        log.info("IOException: " + ex.getMessage(), ex.fillInStackTrace());
        String message = DcCoreMessageUtils.getMessage("PL-BI-2000");
        writeOutputStream(true, CODE_BAR_INSTALL_FAILED, "", message);
    }
    // Verify required files (e.g. bar/90_contents/{odatacol_name}/00_$metadata.xml) were seen.
    isSuccess = checkNecessaryFile(isSuccess, odataCols, doneKeys);
    return isSuccess;
}
From source file:divconq.tool.Updater.java
static public boolean tryUpdate() { @SuppressWarnings("resource") final Scanner scan = new Scanner(System.in); FuncResult<RecordStruct> ldres = Updater.loadDeployed(); if (ldres.hasErrors()) { System.out.println("Error reading deployed.json file: " + ldres.getMessage()); return false; }/*from w w w . j a va 2s.c o m*/ RecordStruct deployed = ldres.getResult(); String ver = deployed.getFieldAsString("Version"); String packfolder = deployed.getFieldAsString("PackageFolder"); String packprefix = deployed.getFieldAsString("PackagePrefix"); if (StringUtil.isEmpty(ver) || StringUtil.isEmpty(packfolder)) { System.out.println("Error reading deployed.json file: Missing Version or PackageFolder"); return false; } if (StringUtil.isEmpty(packprefix)) packprefix = "DivConq"; System.out.println("Current Version: " + ver); Path packpath = Paths.get(packfolder); if (!Files.exists(packpath) || !Files.isDirectory(packpath)) { System.out.println("Error reading PackageFolder - it may not exist or is not a folder."); return false; } File pp = packpath.toFile(); RecordStruct deployment = null; File matchpack = null; for (File f : pp.listFiles()) { if (!f.getName().startsWith(packprefix + "-") || !f.getName().endsWith("-bin.zip")) continue; System.out.println("Checking: " + f.getName()); // if not a match before, clear this deployment = null; try { ZipFile zf = new ZipFile(f); Enumeration<ZipArchiveEntry> entries = zf.getEntries(); while (entries.hasMoreElements()) { ZipArchiveEntry entry = entries.nextElement(); if (entry.getName().equals("deployment.json")) { //System.out.println("crc: " + entry.getCrc()); FuncResult<CompositeStruct> pres = CompositeParser.parseJson(zf.getInputStream(entry)); if (pres.hasErrors()) { System.out.println("Error reading deployment.json file"); break; } deployment = (RecordStruct) pres.getResult(); break; } } zf.close(); } catch (IOException x) { System.out.println("Error reading deployment.json file: " + x); } if (deployment != null) { String fndver = 
deployment.getFieldAsString("Version"); String fnddependson = deployment.getFieldAsString("DependsOn"); if (ver.equals(fnddependson)) { System.out.println("Found update: " + fndver); matchpack = f; break; } } } if ((matchpack == null) || (deployment == null)) { System.out.println("No updates found!"); return false; } String fndver = deployment.getFieldAsString("Version"); String umsg = deployment.getFieldAsString("UpdateMessage"); if (StringUtil.isNotEmpty(umsg)) { System.out.println("========================================================================"); System.out.println(umsg); System.out.println("========================================================================"); } System.out.println(); System.out.println("Do you want to install? (y/n)"); System.out.println(); String p = scan.nextLine().toLowerCase(); if (!p.equals("y")) return false; System.out.println(); System.out.println("Intalling: " + fndver); Set<String> ignorepaths = new HashSet<>(); ListStruct iplist = deployment.getFieldAsList("IgnorePaths"); if (iplist != null) { for (Struct df : iplist.getItems()) ignorepaths.add(df.toString()); } ListStruct dflist = deployment.getFieldAsList("DeleteFiles"); // deleting if (dflist != null) { for (Struct df : dflist.getItems()) { Path delpath = Paths.get(".", df.toString()); if (Files.exists(delpath)) { System.out.println("Deleting: " + delpath.toAbsolutePath()); try { Files.delete(delpath); } catch (IOException x) { System.out.println("Unable to Delete: " + x); } } } } // copying updates System.out.println("Checking for updated files: "); try { @SuppressWarnings("resource") ZipFile zf = new ZipFile(matchpack); Enumeration<ZipArchiveEntry> entries = zf.getEntries(); while (entries.hasMoreElements()) { ZipArchiveEntry entry = entries.nextElement(); String entryname = entry.getName().replace('\\', '/'); boolean xfnd = false; for (String exculde : ignorepaths) if (entryname.startsWith(exculde)) { xfnd = true; break; } if (xfnd) continue; 
System.out.print("."); Path localpath = Paths.get(".", entryname); if (entry.isDirectory()) { if (!Files.exists(localpath)) Files.createDirectories(localpath); } else { boolean hashmatch = false; if (Files.exists(localpath)) { String local = null; String update = null; try (InputStream lin = Files.newInputStream(localpath)) { local = HashUtil.getMd5(lin); } try (InputStream uin = zf.getInputStream(entry)) { update = HashUtil.getMd5(uin); } hashmatch = (StringUtil.isNotEmpty(local) && StringUtil.isNotEmpty(update) && local.equals(update)); } if (!hashmatch) { System.out.print("[" + entryname + "]"); try (InputStream uin = zf.getInputStream(entry)) { Files.createDirectories(localpath.getParent()); Files.copy(uin, localpath, StandardCopyOption.REPLACE_EXISTING); } catch (Exception x) { System.out.println("Error updating: " + entryname + " - " + x); return false; } } } } zf.close(); } catch (IOException x) { System.out.println("Error reading update package: " + x); } // updating local config deployed.setField("Version", fndver); OperationResult svres = Updater.saveDeployed(deployed); if (svres.hasErrors()) { System.out.println("Intalled: " + fndver + " but could not update deployed.json. Repair the file before continuing.\nError: " + svres.getMessage()); return false; } System.out.println("Intalled: " + fndver); return true; }
From source file:ee.sk.digidoc.factory.SAXDigiDocFactory.java
/**
 * Reads in a DigiDoc file. One of fname or isSdoc must be given.
 * @param fname signed doc filename
 * @param isSdoc opened stream with DigiDoc data
 * The user must open and close it.
 * @param errs list of errors to fill with parsing errors. If given
 * then attempt is made to continue parsing on errors and return them in this list.
 * If not given (null) then the first error found will be thrown.
 * @return signed document object if successfully parsed
 */
private SignedDoc readSignedDocOfType(String fname, InputStream isSdoc, boolean isBdoc, List errs)
        throws DigiDocException {
    // Use an instance of ourselves as the SAX event handler
    SAXDigiDocFactory handler = this;
    m_errs = errs;
    DigiDocVerifyFactory.initProvider();
    SAXParserFactory factory = SAXParserFactory.newInstance();
    if (m_logger.isDebugEnabled())
        m_logger.debug("Start reading ddoc/bdoc " + ((fname != null) ? "from file: " + fname : "from stream")
                + " bdoc: " + isBdoc);
    // exactly one of fname / isSdoc must be supplied
    if (fname == null && isSdoc == null) {
        throw new DigiDocException(DigiDocException.ERR_READ_FILE, "No input file", null);
    }
    if (fname != null) {
        File inFile = new File(fname);
        if (!inFile.canRead() || inFile.length() == 0) {
            throw new DigiDocException(DigiDocException.ERR_READ_FILE, "Empty or unreadable input file", null);
        }
    }
    // zf is used for file input, zis for stream input; isEntry holds the current
    // entry's content; fTmp is a disk cache for large stream-mode data files
    ZipFile zf = null;
    ZipArchiveInputStream zis = null;
    ZipArchiveEntry ze = null;
    InputStream isEntry = null;
    File fTmp = null;
    try {
        // harden the SAX parser against XXE (external entity resolution disabled)
        factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
        factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        if (isBdoc) { // bdoc parsing
            // must be a bdoc document ?
            m_doc = new SignedDoc();
            m_doc.setVersion(SignedDoc.BDOC_VERSION_1_0);
            m_doc.setFormat(SignedDoc.FORMAT_BDOC);
            Enumeration eFiles = null;
            if (fname != null) {
                zf = new ZipFile(fname, "UTF-8");
                eFiles = zf.getEntries();
            } else if (isSdoc != null) {
                zis = new ZipArchiveInputStream(isSdoc, "UTF-8", true, true);
            }
            // track seen signature / data-file names to reject duplicates
            ArrayList lSigFnames = new ArrayList();
            ArrayList lDataFnames = new ArrayList();
            // read all entries
            boolean bHasMimetype = false, bManifest1 = false;
            int nFil = 0;
            // two iteration modes: random access via ZipFile, or sequential via
            // ZipArchiveInputStream (note the assignment to ze in the condition)
            while ((zf != null && eFiles.hasMoreElements())
                    || (zis != null && ((ze = zis.getNextZipEntry()) != null))) {
                nFil++;
                // read entry
                if (zf != null) { // ZipFile
                    ze = (ZipArchiveEntry) eFiles.nextElement();
                    isEntry = zf.getInputStream(ze);
                } else { // ZipArchiveInputStream
                    int n = 0, nTot = 0;
                    // metadata entries (mimetype, manifest, signatures) and small
                    // entries are buffered in memory; large data files spill to a
                    // temp file in the configured cache dir
                    if ((ze.getName().equals(FILE_MIMETYPE) || ze.getName().equals(FILE_MANIFEST)
                            || (ze.getName().startsWith(FILE_SIGNATURES) && ze.getName().endsWith(".xml")))
                            || (nMaxBdocFilCached <= 0
                                    || (ze.getSize() < nMaxBdocFilCached && ze.getSize() >= 0))) {
                        ByteArrayOutputStream bos = new ByteArrayOutputStream();
                        byte[] data = new byte[2048];
                        while ((n = zis.read(data)) > 0) {
                            bos.write(data, 0, n);
                            nTot += n;
                        }
                        if (m_logger.isDebugEnabled())
                            m_logger.debug("Read: " + nTot + " bytes from zip");
                        data = bos.toByteArray();
                        bos = null;
                        isEntry = new ByteArrayInputStream(data);
                    } else {
                        File fCacheDir = new File(ConfigManager.instance().getStringProperty(
                                "DIGIDOC_DF_CACHE_DIR", System.getProperty("java.io.tmpdir")));
                        fTmp = File.createTempFile("bdoc-data", ".tmp", fCacheDir);
                        FileOutputStream fos = new FileOutputStream(fTmp);
                        byte[] data = new byte[2048];
                        while ((n = zis.read(data)) > 0) {
                            fos.write(data, 0, n);
                            nTot += n;
                        }
                        if (m_logger.isDebugEnabled())
                            m_logger.debug("Read: " + nTot + " bytes from zip to: " + fTmp.getAbsolutePath());
                        fos.close();
                        isEntry = new FileInputStream(fTmp);
                    }
                }
                if (m_logger.isDebugEnabled())
                    m_logger.debug("Entry: " + ze.getName() + " nlen: " + ze.getName().length() + " size: "
                            + ze.getSize() + " dir: " + ze.isDirectory() + " comp-size: "
                            + ze.getCompressedSize());
                // mimetype file
                if (ze.getName().equals(FILE_MIMETYPE)) {
                    if (m_logger.isDebugEnabled())
                        m_logger.debug("Check mimetype!");
                    checkBdocMimetype(isEntry);
                    bHasMimetype = true;
                    m_doc.setComment(ze.getComment());
                    // BDOC spec requires mimetype to be the first zip entry
                    if (nFil != 1) {
                        m_logger.error("mimetype file is " + nFil + " file but must be first");
                        handleError(new DigiDocException(DigiDocException.ERR_DIGIDOC_BADXML,
                                "mimetype file is not first zip entry", null));
                    }
                } else if (ze.getName().equals(FILE_MANIFEST)) { // manifest.xml file
                    if (m_logger.isDebugEnabled())
                        m_logger.debug("Read manifest");
                    if (!bManifest1 && isEntry != null) {
                        bManifest1 = true;
                        BdocManifestParser mfparser = new BdocManifestParser(m_doc);
                        mfparser.readManifest(isEntry);
                    } else {
                        m_logger.error("Found multiple manifest.xml files!");
                        throw new DigiDocException(DigiDocException.ERR_MULTIPLE_MANIFEST_FILES,
                                "Found multiple manifest.xml files!", null);
                    }
                } else if (ze.getName().startsWith(FILE_SIGNATURES) && ze.getName().endsWith(".xml")) {
                    // some signature
                    m_fileName = ze.getName();
                    if (m_logger.isDebugEnabled())
                        m_logger.debug("Reading bdoc siganture: " + m_fileName);
                    boolean bExists = false;
                    for (int j = 0; j < lSigFnames.size(); j++) {
                        String s1 = (String) lSigFnames.get(j);
                        if (s1.equals(m_fileName))
                            bExists = true;
                    }
                    if (bExists) {
                        m_logger.error("Duplicate signature filename: " + m_fileName);
                        handleError(new DigiDocException(DigiDocException.ERR_DIGIDOC_BADXML,
                                "Duplicate signature filename: " + m_fileName, null));
                    } else
                        lSigFnames.add(m_fileName);
                    // buffer the signature XML fully, then SAX-parse it with this
                    // factory instance as the handler
                    SAXParser saxParser = factory.newSAXParser();
                    ByteArrayOutputStream bos = new ByteArrayOutputStream();
                    int n = 0;
                    byte[] data = new byte[2048];
                    while ((n = isEntry.read(data)) > 0)
                        bos.write(data, 0, n);
                    data = bos.toByteArray();
                    bos = null;
                    if (m_logger.isDebugEnabled())
                        m_logger.debug("Parsing bdoc: " + m_fileName + " size: "
                                + ((data != null) ? data.length : 0));
                    saxParser.parse(new SignatureInputStream(new ByteArrayInputStream(data)), this);
                    if (m_logger.isDebugEnabled())
                        m_logger.debug("Parsed bdoc: " + m_fileName);
                    Signature sig1 = m_doc.getLastSignature();
                    m_sigComment = ze.getComment();
                    if (sig1 != null) {
                        sig1.setPath(m_fileName);
                        sig1.setComment(ze.getComment());
                    }
                } else { // probably a data file
                    if (m_logger.isDebugEnabled())
                        m_logger.debug("Read data file: " + ze.getName());
                    if (!ze.isDirectory()) {
                        boolean bExists = false;
                        for (int j = 0; j < lDataFnames.size(); j++) {
                            String s1 = (String) lDataFnames.get(j);
                            if (s1.equals(ze.getName()))
                                bExists = true;
                        }
                        if (bExists) {
                            m_logger.error("Duplicate datafile filename: " + ze.getName());
                            handleError(new DigiDocException(DigiDocException.ERR_DIGIDOC_BADXML,
                                    "Duplicate datafile filename: " + ze.getName(), null));
                        } else
                            lDataFnames.add(ze.getName());
                        // reuse the DataFile created during signature parsing if the
                        // entry was referenced there, else register a new one
                        DataFile df = m_doc.findDataFileById(ze.getName());
                        if (df != null) {
                            if (ze.getSize() > 0)
                                df.setSize(ze.getSize());
                            df.setContentType(DataFile.CONTENT_BINARY);
                            df.setFileName(ze.getName());
                        } else {
                            df = new DataFile(ze.getName(), DataFile.CONTENT_BINARY, ze.getName(),
                                    "application/binary", m_doc);
                            if (m_doc.getDataFiles() == null)
                                m_doc.setDataFiles(new ArrayList());
                            m_doc.getDataFiles().add(df);
                            //m_doc.addDataFile(df); // this does some intiailization work unnecessary here
                        }
                        // enable caching if requested
                        if (isEntry != null)
                            df.setOrCacheBodyAndCalcHashes(isEntry);
                        df.setComment(ze.getComment());
                        df.setLastModDt(new Date(ze.getTime()));
                        // fix mime type according to DataObjectFormat
                        Signature sig1 = m_doc.getLastSignature();
                        if (sig1 != null) {
                            Reference dRef = sig1.getSignedInfo().getReferenceForDataFile(df);
                            if (dRef != null) {
                                DataObjectFormat dof = sig1.getSignedInfo()
                                        .getDataObjectFormatForReference(dRef);
                                if (dof != null) {
                                    df.setMimeType(dof.getMimeType());
                                }
                            }
                        }
                    }
                }
                // drop the per-entry temp cache file before moving on
                if (fTmp != null) {
                    fTmp.delete();
                    fTmp = null;
                }
            } // while zip entries
            if (!bHasMimetype) {
                m_logger.error("No mimetype file");
                handleError(new DigiDocException(DigiDocException.ERR_DIGIDOC_BADXML,
                        "Not a BDOC format file! No mimetype file!", null));
            }
            // if no signatures exist then copy mime-type from manifest.xml to DataFile -s
            if (m_doc.countSignatures() == 0) {
                for (int i = 0; i < m_doc.countDataFiles(); i++) {
                    DataFile df = m_doc.getDataFile(i);
                    if (m_doc.getManifest() != null) {
                        for (int j = 0; j < m_doc.getManifest().getNumFileEntries(); j++) {
                            ManifestFileEntry mfe = m_doc.getManifest().getFileEntry(j);
                            if (mfe.getFullPath() != null && mfe.getFullPath().equals(df.getFileName())) {
                                df.setMimeType(mfe.getMediaType());
                            } // if fullpath
                        } // for
                    } // if
                } // for i
            }
        } else { // ddoc parsing
            // plain XML DigiDoc: parse the whole input with this handler directly
            if (m_logger.isDebugEnabled())
                m_logger.debug("Reading ddoc: " + fname + " file: " + m_fileName);
            m_fileName = fname;
            SAXParser saxParser = factory.newSAXParser();
            if (fname != null)
                saxParser.parse(new SignatureInputStream(new FileInputStream(fname)), this);
            else if (isSdoc != null)
                saxParser.parse(isSdoc, this);
        }
    } catch (org.xml.sax.SAXParseException ex) {
        m_logger.error("SAX Error: " + ex);
        handleError(ex);
    } catch (Exception ex) {
        m_logger.error("Error reading3: " + ex);
        ex.printStackTrace();
        handleError(ex);
    } finally {
        // cleanup
        try {
            if (isEntry != null) {
                isEntry.close();
                isEntry = null;
            }
            if (zis != null)
                zis.close();
            if (zf != null)
                zf.close();
            if (fTmp != null) {
                fTmp.delete();
                fTmp = null;
            }
        } catch (Exception ex) {
            m_logger.error("Error closing streams and files: " + ex);
        }
    }
    // compare Manifest and DataFiles
    boolean bErrList = (errs != null);
    if (errs == null)
        errs = new ArrayList();
    // NOTE(review): result intentionally not acted on here beyond filling errs;
    // also m_doc is dereferenced by verifyManifestEntries before the null check
    // below - presumably verifyManifestEntries tolerates null; verify.
    boolean bOk = DigiDocVerifyFactory.verifyManifestEntries(m_doc, errs);
    if (m_doc == null) {
        m_logger.error("Error reading4: doc == null");
        handleError(new DigiDocException(DigiDocException.ERR_DIGIDOC_BADXML,
                "This document is not in ddoc or bdoc format", null));
    }
    if (!bErrList && errs.size() > 0) {
        // if error list was not used then we have to throw exception. So we will throw the first one since we can only do it once
        DigiDocException ex = (DigiDocException) errs.get(0);
        throw ex;
    }
    return m_doc;
}
From source file:net.sourceforge.pmd.it.ZipFileExtractor.java
/**
 * Extracts the given zip file into the tempDir, preserving directory structure
 * and restoring the owner-executable bit on files that had it in the archive.
 *
 * <p>Entry names are normalized and checked against the target directory so a
 * crafted archive cannot write outside tempDir ("zip slip").
 *
 * @param zipPath the zip file to extract
 * @param tempDir the target directory
 * @throws Exception if any error happens during extraction
 */
public static void extractZipFile(Path zipPath, Path tempDir) throws Exception {
    Path root = tempDir.normalize();

    // try-with-resources: the archive is closed on every exit path
    try (ZipFile zip = new ZipFile(zipPath.toFile())) {
        Enumeration<ZipArchiveEntry> entries = zip.getEntries();
        while (entries.hasMoreElements()) {
            ZipArchiveEntry entry = entries.nextElement();

            // zip-slip guard: refuse entries that resolve outside the target dir
            Path target = root.resolve(entry.getName()).normalize();
            if (!target.startsWith(root)) {
                throw new java.io.IOException("Zip entry escapes target directory: " + entry.getName());
            }

            File file = target.toFile();
            if (entry.isDirectory()) {
                // tolerate duplicate directory entries (plain mkdirs() would fail)
                assertTrue(file.isDirectory() || file.mkdirs());
            } else {
                // some archives omit directory entries - create parents on demand
                File parent = file.getParentFile();
                if (parent != null && !parent.isDirectory()) {
                    assertTrue(parent.mkdirs());
                }
                try (InputStream data = zip.getInputStream(entry);
                        OutputStream fileOut = new FileOutputStream(file);) {
                    IOUtils.copy(data, fileOut);
                }
                if ((entry.getUnixMode() & OWNER_EXECUTABLE) == OWNER_EXECUTABLE) {
                    file.setExecutable(true);
                }
            }
        }
    }
}
From source file:net.test.aliyun.oss.ImportShooterData.java
@Override public void onStart(AppContext appContext) throws Throwable { String BasePath = "D:/shooterData/"; OSSClient client = appContext.getInstance(OSSClient.class); String tempPath = appContext.getEnvironment().envVar(Environment.HASOR_TEMP_PATH); //// w ww . ja va 2s . c o m File[] zipPacks = new File(BasePath).listFiles(); long intCount = 0; long size = 0; for (File zipfile : zipPacks) { String fileName = zipfile.getName(); fileName = fileName.split("\\.")[0]; ZipFile zipPack = new ZipFile(zipfile); Enumeration<ZipArchiveEntry> enumZip = zipPack.getEntries(); System.out.println(fileName); while (enumZip.hasMoreElements()) { ZipArchiveEntry ent = enumZip.nextElement(); if (ent.isDirectory() == true) { continue; } String itemName = ent.getName(); // // ObjectMetadata info = this.passInfo(tempPath, zipPack, ent); // info.addUserMetadata("oldFileName", itemName); // // String key = fileName + "/" + UUID.randomUUID().toString().replace("-", "") + ".rar"; //InputStream inStream = zipPack.getInputStream(ent); //PutObjectResult res = client.putObject("files-subtitle", key, inStream, info); // intCount++; long itemSize = ent.getSize(); String stated = String.format("%s-%s/%s\t%s\t%s", intCount, fileName, itemName, itemSize, ""); System.out.println(stated + " -> " + ""); size = size + itemSize; } zipPack.close(); } System.out.println(intCount + "\t" + size); }