List of usage examples for org.apache.commons.io.input CountingInputStream CountingInputStream
public CountingInputStream(InputStream in)
From source file:de.quadrillenschule.azocamsyncd.ftpservice.FTPConnection.java
public LinkedList<AZoFTPFile> download(LinkedList<AZoFTPFile> afs, LocalStorage localStorage) { if (afs.size() <= 0) { return afs; }/*www.j a v a2 s.c o m*/ LinkedList<AZoFTPFile> retval = new LinkedList<>(); /* for (AZoFTPFile a : afs) { retval.add(a); }*/ Collections.sort(afs, new Comparator<AZoFTPFile>() { @Override public int compare(AZoFTPFile o1, AZoFTPFile o2) { return o1.ftpFile.getTimestamp().compareTo(o2.ftpFile.getTimestamp()); } }); simplyConnect(FTP.BINARY_FILE_TYPE); notify(FTPConnectionStatus.CONNECTED, getLastWorkingConnection(), -1); if (afs.size() > 0) { AZoFTPFile af = afs.getFirst();//) { File localFile = null; try { localFile = localStorage.getLocalFile(af); } catch (IOException ex) { notify(FTPConnectionStatus.LOCALSTORAGEERROR, af.dir + af.ftpFile.getName(), -1); close(); return retval; } if (!localStorage.prepareLocalFile(localFile)) { notify(FTPConnectionStatus.LOCALSTORAGEERROR, af.dir + af.ftpFile.getName(), -1); close(); return retval; } FileOutputStream fos = null; InputStream is = null; try { fos = new FileOutputStream(localFile); ftpclient.setSoTimeout(LONG_TIMEOUT); is = ftpclient.retrieveFileStream(af.dir + af.ftpFile.getName()); cis = new CountingInputStream(is); downloadsize = af.ftpFile.getSize(); notify(FTPConnectionStatus.DOWNLOADING, af.dir + af.ftpFile.getName(), ((int) (100.0 * ((afs.indexOf(af) + 1.0) / (double) afs.size())))); // ftpclient.setDataTimeout(TIMEOUT); // ftpclient.setSoTimeout(TIMEOUT); // Files.copy(cis, localFile.toPath(), StandardCopyOption.REPLACE_EXISTING); try { IOUtils.copyLarge(cis, fos); } catch (Exception ie) { fos.close(); is.close(); } while (!ftpclient.completePendingCommand()) { try { Thread.currentThread().wait(500); } catch (InterruptedException ex) { Logger.getLogger(FTPConnection.class.getName()).log(Level.SEVERE, null, ex); } } ; is.close(); fos.close(); localStorage.setLatestIncoming(localFile); localStorage.addSyncedFile(af); notify(FTPConnectionStatus.NEW_LOCAL_FILE, 
localFile.getAbsolutePath(), -1); retval.add(af); notify(FTPConnectionStatus.SUCCESS, af.dir + af.ftpFile.getName(), ((int) (100.0 * ((afs.indexOf(af) + 2.0) / (double) afs.size())))); } catch (Exception ex) { try { is.close(); fos.close(); close(); localFile.delete(); simplyConnect(FTP.BINARY_FILE_TYPE); } catch (Exception ex2) { close(); } } } close(); return retval; }
From source file:com.blackducksoftware.integration.hub.cli.CLIDownloadService.java
public void customInstall(HubProxyInfo hubProxyInfo, CLILocation cliLocation, CIEnvironmentVariables ciEnvironmentVariables, final URL archive, String hubVersion, final String localHostName) throws IOException, InterruptedException, HubIntegrationException, IllegalArgumentException, EncryptionException { boolean cliMismatch = true; try {//from w w w .ja v a2 s . c o m final File hubVersionFile = cliLocation.createHubVersionFile(); if (hubVersionFile.exists()) { final String storedHubVersion = IOUtils.toString(new FileReader(hubVersionFile)); if (hubVersion.equals(storedHubVersion)) { cliMismatch = false; } else { hubVersionFile.delete(); hubVersionFile.createNewFile(); } } final File cliInstallDirectory = cliLocation.getCLIInstallDir(); if (!cliInstallDirectory.exists()) { cliMismatch = true; } if (cliMismatch) { logger.debug("Attempting to download the Hub CLI."); final FileWriter writer = new FileWriter(hubVersionFile); writer.write(hubVersion); writer.close(); hubVersionFile.setLastModified(0L); } final long cliTimestamp = hubVersionFile.lastModified(); URLConnection connection = null; try { Proxy proxy = null; if (hubProxyInfo != null) { String proxyHost = hubProxyInfo.getHost(); int proxyPort = hubProxyInfo.getPort(); String proxyUsername = hubProxyInfo.getUsername(); String proxyPassword = hubProxyInfo.getDecryptedPassword(); if (StringUtils.isNotBlank(proxyHost) && proxyPort > 0) { proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(proxyHost, proxyPort)); } if (proxy != null) { if (StringUtils.isNotBlank(proxyUsername) && StringUtils.isNotBlank(proxyPassword)) { AuthenticatorUtil.setAuthenticator(proxyUsername, proxyPassword); } else { AuthenticatorUtil.resetAuthenticator(); } } } if (proxy != null) { connection = archive.openConnection(proxy); } else { connection = archive.openConnection(); } connection.setIfModifiedSince(cliTimestamp); connection.connect(); } catch (final IOException ioe) { logger.error("Skipping installation of " + archive + " to " 
+ cliLocation.getCanonicalPath() + ": " + ioe.toString()); return; } if (connection instanceof HttpURLConnection && ((HttpURLConnection) connection).getResponseCode() == HttpURLConnection.HTTP_NOT_MODIFIED) { // CLI has not been modified return; } final long sourceTimestamp = connection.getLastModified(); if (cliInstallDirectory.exists() && cliInstallDirectory.listFiles().length > 0) { if (!cliMismatch && sourceTimestamp == cliTimestamp) { logger.debug("The current Hub CLI is up to date."); return; } for (final File file : cliInstallDirectory.listFiles()) { FileUtils.deleteDirectory(file); } } else { cliInstallDirectory.mkdir(); } logger.debug("Updating the Hub CLI."); hubVersionFile.setLastModified(sourceTimestamp); logger.info("Unpacking " + archive.toString() + " to " + cliInstallDirectory.getCanonicalPath() + " on " + localHostName); final CountingInputStream cis = new CountingInputStream(connection.getInputStream()); try { unzip(cliInstallDirectory, cis, logger); updateJreSecurity(logger, cliLocation, ciEnvironmentVariables); } catch (final IOException e) { throw new IOException(String.format("Failed to unpack %s (%d bytes read of total %d)", archive, cis.getByteCount(), connection.getContentLength()), e); } } catch (final IOException e) { throw new IOException("Failed to install " + archive + " to " + cliLocation.getCanonicalPath(), e); } }
From source file:com.liferay.sync.engine.document.library.handler.DownloadFileHandler.java
@Override protected void doHandleResponse(HttpResponse httpResponse) throws Exception { Header errorHeader = httpResponse.getFirstHeader("Sync-Error"); if (errorHeader != null) { handleSiteDeactivatedException(); }//from w w w. j a va 2 s.com long syncAccountId = getSyncAccountId(); final Session session = SessionManager.getSession(syncAccountId); Header tokenHeader = httpResponse.getFirstHeader("Sync-JWT"); if (tokenHeader != null) { session.addHeader("Sync-JWT", tokenHeader.getValue()); } InputStream inputStream = null; SyncFile syncFile = getLocalSyncFile(); if ((syncFile == null) || isUnsynced(syncFile)) { return; } Path filePath = Paths.get(syncFile.getFilePathName()); try { HttpEntity httpEntity = httpResponse.getEntity(); inputStream = new CountingInputStream(httpEntity.getContent()) { @Override protected synchronized void afterRead(int n) { session.incrementDownloadedBytes(n); super.afterRead(n); } }; inputStream = new RateLimitedInputStream(inputStream, syncAccountId); if (httpResponse.getFirstHeader("Accept-Ranges") != null) { copyFile(syncFile, filePath, inputStream, true); } else { copyFile(syncFile, filePath, inputStream, false); } } finally { StreamUtil.cleanUp(inputStream); } }
From source file:ch.cyberduck.core.dav.DAVReadFeatureTest.java
/**
 * Verifies that closing a read stream before consuming any bytes releases the
 * underlying HTTP entity cleanly: the byte count stays at zero.
 */
@Test
public void testReadCloseReleaseEntity() throws Exception {
    final Credentials anonymous = new Credentials(
            PreferencesFactory.get().getProperty("connection.login.anon.name"), null);
    final Host host = new Host(new DAVSSLProtocol(), "svn.cyberduck.ch", anonymous);
    final DAVSession session = new DAVSession(host);
    session.open(new DisabledHostKeyCallback());
    session.login(new DisabledPasswordStore(), new DisabledLoginCallback(), new DisabledCancelCallback());
    final Path file = new Path("/trunk/LICENSE.txt", EnumSet.of(Path.Type.file));
    final TransferStatus status = new TransferStatus();
    final CountingInputStream stream = new CountingInputStream(
            new DAVReadFeature(session).read(file, status, new DisabledConnectionCallback()));
    stream.close();
    assertEquals(0L, stream.getByteCount(), 0L);
    session.close();
}
From source file:fi.hip.sicx.jclouds.JCloudClient.java
/**
 * Returns InputStream that can be read data from a wanted file in
 * the cloud. Note! Might be that is not working asynchronously -
 * or at least buffers tens of megabytes of data.
 *
 * @param currentFileInTheCloud Name of the file in the cloud
 * @param indatasize Tells how big the data data is which is to be
 *                   read. Used only to calculate progress of the
 *                   transfer. Note! This could be also asked from server?
 * @param sco observer
 * @return InputStream that can be used to read data from a file in the cloud,
 *         or null if fetching the blob failed
 */
public InputStream readData(String currentFileInTheCloud, int indatasize, StorageClientObserver sco) {
    this.dos = null;
    this.datasize = indatasize;
    this.futures = new ArrayList<Future<Blob>>();
    this.futures.add(this.context.getAsyncBlobStore().getBlob(containerName, currentFileInTheCloud));
    for (Future<Blob> future : this.futures) {
        try {
            // Wrap the payload so callers can observe transfer progress.
            this.cis = new CountingInputStream(future.get().getPayload().getInput());
        } catch (InterruptedException e) {
            // FIX: restore the interrupt flag so the caller can observe
            // cancellation; the original swallowed it after printing.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } catch (ExecutionException e) {
            e.printStackTrace();
        }
    }
    return this.cis;
}
From source file:ch.cyberduck.core.sds.SDSReadFeatureTest.java
@Test public void testReadCloseReleaseEntity() throws Exception { final Host host = new Host(new SDSProtocol(), "duck.ssp-europe.eu", new Credentials( System.getProperties().getProperty("sds.user"), System.getProperties().getProperty("sds.key"))); final SDSSession session = new SDSSession(host, new DisabledX509TrustManager(), new DefaultX509KeyManager()); session.open(new DisabledHostKeyCallback()); session.login(new DisabledPasswordStore(), new DisabledLoginCallback(), new DisabledCancelCallback()); final TransferStatus status = new TransferStatus(); final byte[] content = RandomUtils.nextBytes(32769); final TransferStatus writeStatus = new TransferStatus(); writeStatus.setLength(content.length); final Path room = new SDSDirectoryFeature(session) .mkdir(new Path(new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.directory, Path.Type.volume)), null, new TransferStatus()); final Path test = new Path(room, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file)); final SDSWriteFeature writer = new SDSWriteFeature(session); final HttpResponseOutputStream<VersionId> out = writer.write(test, writeStatus, new DisabledConnectionCallback()); assertNotNull(out);// ww w . j a va2 s .co m new StreamCopier(writeStatus, writeStatus).transfer(new ByteArrayInputStream(content), out); final CountingInputStream in = new CountingInputStream( new SDSReadFeature(session).read(test, status, new DisabledConnectionCallback())); in.close(); assertEquals(0L, in.getByteCount(), 0L); new SDSDeleteFeature(session).delete(Collections.singletonList(room), new DisabledLoginCallback(), new Delete.DisabledCallback()); session.close(); }
From source file:com.moesol.geoserver.sync.client.AbstractClientSynchronizer.java
/**
 * Consumes a GML response: parses the feature collection (measuring parse time
 * and received bytes for raw streams) and routes each feature to the listener
 * as a create or update, clearing it from the pending-delete set.
 */
void processGmlResponse(Response response) throws IOException, SAXException, ParserConfigurationException {
    final FeatureCollection<?, ?> collection;
    if (response instanceof ResponseFeatureCollection) {
        collection = ((ResponseFeatureCollection) response).getFeatureCollection();
    } else {
        final CountingInputStream byteCounter = new CountingInputStream(response.getResultStream());
        final long started = System.currentTimeMillis();
        collection = (FeatureCollection<?, ?>) parseWfs(byteCounter);
        m_parseMillis = System.currentTimeMillis() - started;
        m_rxGml += byteCounter.getByteCount();
    }
    final FeatureIterator<?> iterator = collection.features();
    try {
        while (iterator.hasNext()) {
            final Feature feature = iterator.next();
            final FeatureId id = feature.getIdentifier();
            m_potentialDeletes.remove(id);
            if (m_features.containsKey(id)) {
                m_listener.featureUpdate(id, feature);
                m_numUpdates++;
            } else {
                m_listener.featureCreate(id, feature);
                m_numCreates++;
            }
        }
    } finally {
        iterator.close();
    }
}
From source file:com.moesol.geoserver.sync.client.AbstractClientSynchronizer.java
/**
 * Parses a SHA-1 sync JSON response into {@code m_server}, validating that the
 * level advanced by exactly one and the version token is unchanged.
 *
 * @return true when synchronization can stop (server empty or no hashes left)
 * @throws IllegalStateException on a level or version mismatch
 */
private boolean processSha1SyncResponse(Response response) throws IOException {
    final int expectedLevel = m_server.level() + 1;
    final CountingInputStream byteCounter = new CountingInputStream(response.getResultStream());
    final InputStreamReader jsonReader = new InputStreamReader(new BufferedInputStream(byteCounter), UTF8.UTF8);
    try {
        m_server = new Gson().fromJson(jsonReader, Sha1SyncJson.class);
        if (expectedLevel != m_server.level()) {
            throw new IllegalStateException(
                    "Level warp! expected(" + expectedLevel + "), actual(" + m_server.level() + ")");
        }
        if (!versionFeatures.getToken().equals(m_server.version())) {
            throw new IllegalStateException("Version warp! expected(" + versionFeatures.getToken()
                    + "), actual(" + m_server.version() + ")");
        }
        if (isServerEmpty()) {
            clearLocal();
            return true;
        }
        return isServerHashesEmpty();
    } finally {
        // Account received bytes and release the reader regardless of outcome.
        m_rxBytes += byteCounter.getByteCount();
        jsonReader.close();
    }
}
From source file:com.moesol.geoserver.sync.client.AbstractGeoserverClientSynchronizer.java
/**
 * Consumes a GML response: parses the feature collection (counting received
 * bytes for raw streams) and routes each feature to the listener as a create
 * or update, clearing it from the pending-delete set.
 */
void processGmlResponse(Response response) throws IOException, SAXException, ParserConfigurationException {
    final FeatureCollection<?, ?> collection;
    if (response instanceof ResponseFeatureCollection) {
        collection = ((ResponseFeatureCollection) response).getFeatureCollection();
    } else {
        final CountingInputStream byteCounter = new CountingInputStream(response.getResultStream());
        collection = (FeatureCollection<?, ?>) parseWfs(byteCounter);
        m_rxGml += byteCounter.getByteCount();
    }
    final FeatureIterator<?> iterator = collection.features();
    try {
        while (iterator.hasNext()) {
            final Feature feature = iterator.next();
            final FeatureId id = feature.getIdentifier();
            m_potentialDeletes.remove(id);
            if (m_features.containsKey(id)) {
                m_listener.featureUpdate(id, feature);
                m_numUpdates++;
            } else {
                m_listener.featureCreate(id, feature);
                m_numCreates++;
            }
        }
    } finally {
        iterator.close();
    }
}
From source file:hudson.FilePath.java
/** * Given a tgz/zip file, extracts it to the given target directory, if necessary. * * <p>/*from ww w . j a v a 2 s .c o m*/ * This method is a convenience method designed for installing a binary package to a location * that supports upgrade and downgrade. Specifically, * * <ul> * <li>If the target directory doesn't exist {@linkplain #mkdirs() it'll be created}. * <li>The timestamp of the .tgz file is left in the installation directory upon extraction. * <li>If the timestamp left in the directory doesn't match with the timestamp of the current archive file, * the directory contents will be discarded and the archive file will be re-extracted. * <li>If the connection is refused but the target directory already exists, it is left alone. * </ul> * * @param archive * The resource that represents the tgz/zip file. This URL must support the "Last-Modified" header. * (Most common usage is to get this from {@link ClassLoader#getResource(String)}) * @param listener * If non-null, a message will be printed to this listener once this method decides to * extract an archive. * @return * true if the archive was extracted. false if the extraction was skipped because the target directory * was considered up to date. * @since 1.299 */ public boolean installIfNecessaryFrom(URL archive, TaskListener listener, String message) throws IOException, InterruptedException { URLConnection con; try { con = archive.openConnection(); con.connect(); } catch (IOException x) { if (this.exists()) { // Cannot connect now, so assume whatever was last unpacked is still OK. 
if (listener != null) { listener.getLogger() .println("Skipping installation of " + archive + " to " + remote + ": " + x); } return false; } else { throw x; } } long sourceTimestamp = con.getLastModified(); FilePath timestamp = this.child(".timestamp"); if (this.exists()) { if (timestamp.exists() && sourceTimestamp == timestamp.lastModified()) return false; // already up to date this.deleteContents(); } if (listener != null) listener.getLogger().println(message); CountingInputStream cis = new CountingInputStream(con.getInputStream()); try { if (archive.toExternalForm().endsWith(".zip")) unzipFrom(cis); else untarFrom(cis, GZIP); } catch (IOException e) { throw new IOException2(String.format("Failed to unpack %s (%d bytes read of total %d)", archive, cis.getByteCount(), con.getContentLength()), e); } timestamp.touch(sourceTimestamp); return true; }