List of usage examples for org.apache.commons.compress.archivers.tar TarArchiveInputStream read
public int read(byte[] b) throws IOException

Reads up to b.length bytes of data from the current tar archive entry into the buffer b. Returns the number of bytes actually read, or -1 if the end of the current entry has been reached. Note: a single call may return fewer bytes than requested, so callers must loop to read an entry completely.
. From source file:com.alcatel_lucent.nz.wnmsextract.reader.FileSelector.java
/**
 * Extracts a tar file to its constituent parts, processing gzips along the way:
 * yyyyMMdd.tar -> /yyyyMMdd/INode-CH_RNC01/A2010...gz
 *
 * @param tf the tar file to extract
 * @throws FileNotFoundException if the tar file cannot be opened
 */
protected void untar(File tf) throws FileNotFoundException {
    TarArchiveInputStream tais = null;
    try {
        tais = new TarArchiveInputStream(new FileInputStream(tf));
        TarArchiveEntry t1;
        while ((t1 = tais.getNextTarEntry()) != null) {
            if (t1.isDirectory()) {
                // Directory entries only set the identifier used for subsequent files.
                if (t1.getName().contains("account"))
                    identifier = ".vcc";
                else
                    identifier = "";
                continue;
            }
            // Use the entry's base name; guard against entries with no '/' at all
            // (the original substring(lastIndexOf("/")) would throw on index -1).
            String entryName = t1.getName();
            int slash = entryName.lastIndexOf('/');
            String fn = (slash >= 0) ? entryName.substring(slash) : "/" + entryName;
            File f = new File(getCalTempPath() + fn);
            FileOutputStream fos = new FileOutputStream(f);
            BufferedOutputStream bos = new BufferedOutputStream(fos, BUFFER);
            try {
                // Write through the buffered stream; the original wrote to the
                // unbuffered FileOutputStream, making the buffer useless.
                byte[] content = new byte[BUFFER];
                int n;
                while (-1 != (n = tais.read(content))) {
                    bos.write(content, 0, n);
                }
                bos.flush();
            } finally {
                bos.close(); // closes the underlying FileOutputStream too
            }
            File unz = f.getName().endsWith("zip") ? unzip3(f) : ungzip(f);
            if (unz != null)
                allfiles.add(unz);
            f.delete();
        }
    } catch (IOException ioe) {
        jlog.fatal("IO read error :: " + ioe);
    } finally {
        // Ensure the archive stream is released even when an entry fails mid-copy.
        if (tais != null) {
            try {
                tais.close();
            } catch (IOException ignored) {
                // best-effort close
            }
        }
    }
}
From source file:com.buaa.cfs.utils.FileUtil.java
private static void unpackEntries(TarArchiveInputStream tis, TarArchiveEntry entry, File outputDir) throws IOException { if (entry.isDirectory()) { File subDir = new File(outputDir, entry.getName()); if (!subDir.mkdirs() && !subDir.isDirectory()) { throw new IOException("Mkdirs failed to create tar internal dir " + outputDir); }//from ww w. j a v a 2s. c o m for (TarArchiveEntry e : entry.getDirectoryEntries()) { unpackEntries(tis, e, subDir); } return; } File outputFile = new File(outputDir, entry.getName()); if (!outputFile.getParentFile().exists()) { if (!outputFile.getParentFile().mkdirs()) { throw new IOException("Mkdirs failed to create tar internal dir " + outputDir); } } int count; byte data[] = new byte[2048]; BufferedOutputStream outputStream = new BufferedOutputStream(new FileOutputStream(outputFile)); while ((count = tis.read(data)) != -1) { outputStream.write(data, 0, count); } outputStream.flush(); outputStream.close(); }
From source file:autoupdater.FileDAO.java
/** * Untars a .tar.//w w w . j a v a2s . c o m * * @param fileToUntar * @return true if successful * @throws FileNotFoundException * @throws IOException */ private boolean untar(File fileToUntar) throws FileNotFoundException, IOException { boolean fileUntarred = false; String untarLocation = fileToUntar.getParentFile().getAbsolutePath(); TarArchiveInputStream tarStream = null; try { tarStream = new TarArchiveInputStream(new FileInputStream(fileToUntar)); BufferedReader bufferedTarReader = null; try { bufferedTarReader = new BufferedReader(new InputStreamReader(tarStream)); ArchiveEntry entry; while ((entry = tarStream.getNextEntry()) != null) { byte[] buffer = new byte[8 * 1024]; File tempFile = new File(String.format("%s/%s", untarLocation, entry.getName())); if (entry.isDirectory()) { if (!tempFile.exists()) { tempFile.mkdir(); } } else { OutputStream output = new FileOutputStream(tempFile); try { int bytesRead; while ((bytesRead = tarStream.read(buffer)) != -1) { output.write(buffer, 0, bytesRead); } } finally { output.close(); } tempFile.setExecutable(true); // make sure the binary files can be executed } } } finally { if (bufferedTarReader != null) { bufferedTarReader.close(); } } } finally { if (tarStream != null) { tarStream.close(); } } return fileUntarred; }
From source file:in.neoandroid.neoupdate.neoUpdate.java
private String getMetaFromNPK() { try {/*from w w w . j av a2 s . c om*/ GZIPInputStream npkFile = new GZIPInputStream(new FileInputStream(baseUrl)); //FileInputStream npkFile = new FileInputStream(baseUrl); TarArchiveInputStream input = new TarArchiveInputStream(npkFile); TarArchiveEntry ae; while ((ae = input.getNextTarEntry()) != null) { if (ae.isDirectory()) Log.e("[neoUpdate]", "Dir: " + ae.getName()); else Log.e("[neoUpdate]", "File: " + ae.getName()); if (ae.getName().equalsIgnoreCase("neoupdate.json")) { byte buff[] = new byte[(int) ae.getSize()]; input.read(buff); input.close(); return new String(buff); } } input.close(); } catch (Exception e) { e.printStackTrace(); } return null; }
From source file:com.cloudera.cli.validator.components.ParcelFileRunner.java
@Override public boolean run(String target, Writer writer) throws IOException { File parcelFile = new File(target); writer.write(String.format("Validating: %s\n", parcelFile.getPath())); if (!checkExistence(parcelFile, false, writer)) { return false; }//from w ww . j av a 2 s. c o m String expectedDir; String distro; Matcher parcelMatcher = PARCEL_PATTERN.matcher(parcelFile.getName()); if (parcelMatcher.find()) { expectedDir = parcelMatcher.group(1) + '-' + parcelMatcher.group(2); distro = parcelMatcher.group(3); } else { writer.write(String.format("==> %s is not a valid parcel filename\n", parcelFile.getName())); return false; } if (!KNOWN_DISTROS.contains(distro)) { writer.write(String.format("==> %s does not appear to be a distro supported by CM\n", distro)); } FileInputStream fin = null; BufferedInputStream bin = null; GzipCompressorInputStream gin = null; TarArchiveInputStream tin = null; try { InputStream in = null; fin = new FileInputStream(parcelFile); bin = new BufferedInputStream(fin); try { gin = new GzipCompressorInputStream(bin); in = gin; } catch (IOException e) { // It's not compressed. Proceed as if uncompressed tar. 
writer.write(String.format("==> Warning: Parcel is not compressed with gzip\n")); in = bin; } tin = new TarArchiveInputStream(in); byte[] parcelJson = null; byte[] alternativesJson = null; byte[] permissionsJson = null; Map<String, Boolean> tarEntries = Maps.newHashMap(); Set<String> unexpectedDirs = Sets.newHashSet(); for (TarArchiveEntry e = tin.getNextTarEntry(); e != null; e = tin.getNextTarEntry()) { String name = e.getName(); // Remove trailing '/' tarEntries.put(name.replaceAll("/$", ""), e.isDirectory()); if (!StringUtils.startsWith(name, expectedDir)) { unexpectedDirs.add(name.split("/")[0]); } if (e.getName().equals(expectedDir + PARCEL_JSON_PATH)) { parcelJson = new byte[(int) e.getSize()]; tin.read(parcelJson); } else if (e.getName().equals(expectedDir + ALTERNATIVES_JSON_PATH)) { alternativesJson = new byte[(int) e.getSize()]; tin.read(alternativesJson); } else if (e.getName().equals(expectedDir + PERMISSIONS_JSON_PATH)) { permissionsJson = new byte[(int) e.getSize()]; tin.read(permissionsJson); } } boolean ret = true; if (!unexpectedDirs.isEmpty()) { writer.write(String.format("==> The following unexpected top level directories were observed: %s\n", unexpectedDirs.toString())); writer.write( String.format("===> The only valid top level directory, based on parcel filename, is: %s\n", expectedDir)); ret = false; } ret &= checkParcelJson(expectedDir, parcelJson, tarEntries, writer); ret &= checkAlternatives(expectedDir, alternativesJson, tarEntries, writer); ret &= checkPermissions(expectedDir, permissionsJson, tarEntries, writer); return ret; } catch (IOException e) { writer.write(String.format("==> %s: %s\n", e.getClass().getName(), e.getMessage())); return false; } finally { IOUtils.closeQuietly(tin); IOUtils.closeQuietly(gin); IOUtils.closeQuietly(bin); IOUtils.closeQuietly(fin); } }
From source file:data.TarExtractorTest.java
/**
 * Verifies that TarExtractor.extractTo creates a file for a file entry,
 * copies one chunk of data into it, and ignores directory entries.
 * Uses PowerMock to intercept constructor calls for File/FileOutputStream.
 */
@Test
public void itExtractsTarFile() throws Exception {
    TarArchiveInputStream tarArchiveInputStream = mock(TarArchiveInputStream.class);
    whenNew(TarArchiveInputStream.class).withArguments(any(InputStream.class))
            .thenReturn(tarArchiveInputStream);
    // First entry: a regular file; second entry: a directory; then end of archive.
    when(tarArchiveInputStream.getNextTarEntry()).thenAnswer(new Answer() {
        private int count = 0;

        public Object answer(InvocationOnMock invocationOnMock) {
            count++;
            if (count == 1) {
                TarArchiveEntry tarArchiveEntry = mock(TarArchiveEntry.class);
                when(tarArchiveEntry.getName()).thenReturn("data.gpdb");
                when(tarArchiveEntry.isFile()).thenReturn(true);
                return tarArchiveEntry;
            }
            if (count == 2) {
                TarArchiveEntry tarArchiveEntry = mock(TarArchiveEntry.class);
                when(tarArchiveEntry.getName()).thenReturn("IpV6Data");
                when(tarArchiveEntry.isDirectory()).thenReturn(true);
                return tarArchiveEntry;
            }
            return null;
        }
    });
    File directory = mock(File.class);
    File fileInTar = spy(mock(File.class));
    when(fileInTar.createNewFile()).thenReturn(true);
    whenNew(File.class).withArguments(directory, "data.gpdb").thenReturn(fileInTar);
    File directoryInTar = spy(mock(File.class));
    when(directoryInTar.createNewFile()).thenReturn(true);
    whenNew(File.class).withArguments(directory, "IpV6Data").thenReturn(directoryInTar);
    FileOutputStream fileOutputStream = mock(FileOutputStream.class);
    whenNew(FileOutputStream.class).withArguments(fileInTar).thenReturn(fileOutputStream);
    // One read returning 654321 bytes, then end of entry (-1).
    // Uses Integer.valueOf instead of the deprecated new Integer(...) constructor.
    when(tarArchiveInputStream.read(any(byte[].class))).thenAnswer(new Answer() {
        private int count = 0;

        public Object answer(InvocationOnMock invocationOnMock) {
            count++;
            return (count == 1) ? Integer.valueOf(654321) : Integer.valueOf(-1);
        }
    });
    InputStream inputStream1 = mock(InputStream.class);
    TarExtractor tarExtractor = new TarExtractor();
    assertThat(tarExtractor.extractTo(directory, inputStream1), equalTo(true));
    verify(fileInTar).createNewFile();
    verify(fileOutputStream).write(any(byte[].class), eq(0), eq(654321));
    verify(fileOutputStream).close();
    verifyNoMoreInteractions(fileOutputStream);
    verifyZeroInteractions(directoryInTar);
}
From source file:adams.core.io.TarUtils.java
/** * Decompresses the specified file from a tar file. * * @param input the tar file to decompress * @param archiveFile the file from the archive to extract * @param output the name of the output file * @param createDirs whether to create the directory structure represented * by output file// ww w . j a v a 2s . co m * @param bufferSize the buffer size to use * @param errors for storing potential errors * @return whether file was successfully extracted */ public static boolean decompress(File input, String archiveFile, File output, boolean createDirs, int bufferSize, StringBuilder errors) { boolean result; FileInputStream fis; TarArchiveInputStream archive; TarArchiveEntry entry; File outFile; String outName; byte[] buffer; FileOutputStream fos; BufferedOutputStream out; int len; String error; long size; long read; result = false; archive = null; fis = null; fos = null; try { // decompress archive buffer = new byte[bufferSize]; fis = new FileInputStream(input.getAbsoluteFile()); archive = openArchiveForReading(input, fis); while ((entry = archive.getNextTarEntry()) != null) { if (entry.isDirectory()) continue; if (!entry.getName().equals(archiveFile)) continue; out = null; outName = null; try { // output name outName = output.getAbsolutePath(); // create directory, if necessary outFile = new File(outName).getParentFile(); if (!outFile.exists()) { if (!createDirs) { error = "Output directory '" + outFile.getAbsolutePath() + " does not exist', " + "skipping extraction of '" + outName + "'!"; System.err.println(error); errors.append(error + "\n"); break; } else { if (!outFile.mkdirs()) { error = "Failed to create directory '" + outFile.getAbsolutePath() + "', " + "skipping extraction of '" + outName + "'!"; System.err.println(error); errors.append(error + "\n"); break; } } } // extract data fos = new FileOutputStream(outName); out = new BufferedOutputStream(fos, bufferSize); size = entry.getSize(); read = 0; while (read < size) { len = archive.read(buffer); read += 
len; out.write(buffer, 0, len); } result = true; break; } catch (Exception e) { result = false; error = "Error extracting '" + entry.getName() + "' to '" + outName + "': " + e; System.err.println(error); errors.append(error + "\n"); } finally { FileUtils.closeQuietly(out); FileUtils.closeQuietly(fos); } } } catch (Exception e) { result = false; e.printStackTrace(); errors.append("Error occurred: " + e + "\n"); } finally { FileUtils.closeQuietly(fis); if (archive != null) { try { archive.close(); } catch (Exception e) { // ignored } } } return result; }
From source file:gov.nih.nci.ncicb.tcga.dcc.dam.processors.FilePackagerFastTest.java
private void checkTar(final File f_tar, final Boolean isMafArchive) throws IOException { FileReader origReader = null; TarArchiveInputStream tarIn = null; try {// w w w .j a va 2 s. com //we're going to open each of the included files in turn and compare to our tiny //original input file. So first, need to read the original file into a string. StringBuilder origBuf = new StringBuilder(); char[] cbuf = new char[1024]; //noinspection IOResourceOpenedButNotSafelyClosed origReader = new FileReader(THIS_FOLDER + TEST_DOWNLOADFILE); int iread; while ((iread = origReader.read(cbuf)) != -1) { for (int i = 0; i < iread; i++) { origBuf.append(cbuf[i]); } } String origText = origBuf.toString(); //noinspection IOResourceOpenedButNotSafelyClosed tarIn = new TarArchiveInputStream(new FileInputStream(f_tar)); TarArchiveEntry entry; int i = 0; entry = tarIn.getNextTarEntry(); assertEquals("file_manifest.txt", entry.getName()); if (isMafArchive) { entry = tarIn.getNextTarEntry(); assertEquals("README_DCC.txt", entry.getName()); } while ((entry = tarIn.getNextTarEntry()) != null) { //compare to input file File expectedName = new File("platform" + i + "/center" + i + "/Level_1/f" + i + ".idat"); assertEquals(expectedName, new File(entry.getName())); byte[] content = new byte[2056]; OutputStream byteOut = new ByteArrayOutputStream(2056); //noinspection ResultOfMethodCallIgnored tarIn.read(content); byteOut.write(content); byteOut.close(); assertEquals(origText, byteOut.toString().trim()); i++; } assertEquals(i, HOWMANYFILES); } finally { IOUtils.closeQuietly(origReader); IOUtils.closeQuietly(tarIn); } }
From source file:adams.core.io.TarUtils.java
/** * Decompresses the files in a tar file. Files can be filtered based on their * filename, using a regular expression (the matching sense can be inverted). * * @param input the tar file to decompress * @param outputDir the directory where to store the extracted files * @param createDirs whether to re-create the directory structure from the * tar file//from w w w.j ava 2 s . co m * @param match the regular expression that the files are matched against * @param invertMatch whether to invert the matching sense * @param bufferSize the buffer size to use * @param errors for storing potential errors * @return the successfully extracted files */ public static List<File> decompress(File input, File outputDir, boolean createDirs, BaseRegExp match, boolean invertMatch, int bufferSize, StringBuilder errors) { List<File> result; FileInputStream fis; TarArchiveInputStream archive; TarArchiveEntry entry; File outFile; String outName; byte[] buffer; BufferedOutputStream out; FileOutputStream fos; int len; String error; long size; long read; result = new ArrayList<>(); archive = null; fis = null; fos = null; try { // decompress archive buffer = new byte[bufferSize]; fis = new FileInputStream(input.getAbsoluteFile()); archive = openArchiveForReading(input, fis); while ((entry = archive.getNextTarEntry()) != null) { if (entry.isDirectory() && !createDirs) continue; // does name match? 
if (!match.isMatchAll() && !match.isEmpty()) { if (invertMatch && match.isMatch(entry.getName())) continue; else if (!invertMatch && !match.isMatch(entry.getName())) continue; } // extract if (entry.isDirectory() && createDirs) { outFile = new File(outputDir.getAbsolutePath() + File.separator + entry.getName()); if (!outFile.mkdirs()) { error = "Failed to create directory '" + outFile.getAbsolutePath() + "'!"; System.err.println(error); errors.append(error + "\n"); } } else { out = null; outName = null; try { // assemble output name outName = outputDir.getAbsolutePath() + File.separator; if (createDirs) outName += entry.getName(); else outName += new File(entry.getName()).getName(); // create directory, if necessary outFile = new File(outName).getParentFile(); if (!outFile.exists()) { if (!outFile.mkdirs()) { error = "Failed to create directory '" + outFile.getAbsolutePath() + "', " + "skipping extraction of '" + outName + "'!"; System.err.println(error); errors.append(error + "\n"); continue; } } // extract data fos = new FileOutputStream(outName); out = new BufferedOutputStream(fos, bufferSize); size = entry.getSize(); read = 0; while (read < size) { len = archive.read(buffer); read += len; out.write(buffer, 0, len); } result.add(new File(outName)); } catch (Exception e) { error = "Error extracting '" + entry.getName() + "' to '" + outName + "': " + e; System.err.println(error); errors.append(error + "\n"); } finally { FileUtils.closeQuietly(out); FileUtils.closeQuietly(fos); } } } } catch (Exception e) { e.printStackTrace(); errors.append("Error occurred: " + e + "\n"); } finally { FileUtils.closeQuietly(fis); if (archive != null) { try { archive.close(); } catch (Exception e) { // ignored } } } return result; }
From source file:freenet.client.ArchiveManager.java
private void handleTARArchive(ArchiveStoreContext ctx, FreenetURI key, InputStream data, String element, ArchiveExtractCallback callback, MutableBoolean gotElement, boolean throwAtExit, ClientContext context) throws ArchiveFailureException, ArchiveRestartException { if (logMINOR) Logger.minor(this, "Handling a TAR Archive"); TarArchiveInputStream tarIS = null; try {//from w ww . ja va2 s. com tarIS = new TarArchiveInputStream(data); // MINOR: Assumes the first entry in the tarball is a directory. ArchiveEntry entry; byte[] buf = new byte[32768]; HashSet<String> names = new HashSet<String>(); boolean gotMetadata = false; outerTAR: while (true) { try { entry = tarIS.getNextEntry(); } catch (IllegalArgumentException e) { // Annoyingly, it can throw this on some corruptions... throw new ArchiveFailureException("Error reading archive: " + e.getMessage(), e); } if (entry == null) break; if (entry.isDirectory()) continue; String name = stripLeadingSlashes(entry.getName()); if (names.contains(name)) { Logger.error(this, "Duplicate key " + name + " in archive " + key); continue; } long size = entry.getSize(); if (name.equals(".metadata")) gotMetadata = true; if (size > maxArchivedFileSize && !name.equals(element)) { addErrorElement( ctx, key, name, "File too big: " + size + " greater than current archived file size limit " + maxArchivedFileSize, true); } else { // Read the element long realLen = 0; Bucket output = tempBucketFactory.makeBucket(size); OutputStream out = output.getOutputStream(); try { int readBytes; while ((readBytes = tarIS.read(buf)) > 0) { out.write(buf, 0, readBytes); readBytes += realLen; if (readBytes > maxArchivedFileSize) { addErrorElement(ctx, key, name, "File too big: " + maxArchivedFileSize + " greater than current archived file size limit " + maxArchivedFileSize, true); out.close(); out = null; output.free(); continue outerTAR; } } } finally { if (out != null) out.close(); } if (size <= maxArchivedFileSize) { addStoreElement(ctx, key, name, 
output, gotElement, element, callback, context); names.add(name); trimStoredData(); } else { // We are here because they asked for this file. callback.gotBucket(output, context); gotElement.value = true; addErrorElement( ctx, key, name, "File too big: " + size + " greater than current archived file size limit " + maxArchivedFileSize, true); } } } // If no metadata, generate some if (!gotMetadata) { generateMetadata(ctx, key, names, gotElement, element, callback, context); trimStoredData(); } if (throwAtExit) throw new ArchiveRestartException("Archive changed on re-fetch"); if ((!gotElement.value) && element != null) callback.notInArchive(context); } catch (IOException e) { throw new ArchiveFailureException("Error reading archive: " + e.getMessage(), e); } finally { Closer.close(tarIS); } }