List of usage examples for the org.apache.commons.io.input.TeeInputStream constructor:
public TeeInputStream(InputStream input, OutputStream branch, boolean closeBranch)
From source file:eu.itesla_project.modules.histo.cache.TwoLevelsHistoDbCache.java
@Override public InputStream getData(String url) throws IOException { InputStream is = cache1.getData(url); if (is == null) { is = cache2.getData(url);//from w ww . j a va 2 s . c om if (is != null) { // synchronize with the first cache OutputStream os = cache1.putData(url); is = new TeeInputStream(is, os, true); } } return is; }
From source file:ibw.updater.common.config.ConfigurationLoader.java
/**
 * Opens the configuration properties stream, optionally mirroring it.
 * <p>
 * When an active config file exists, everything read from the properties
 * stream is simultaneously written into that file via a tee.
 *
 * @return the configuration input stream (possibly teed into the file)
 * @throws IOException if the mirror file cannot be opened
 */
private InputStream getConfigInputStream() throws IOException {
    ConfigurationInputStream propertiesStream = ConfigurationInputStream.getConfigPropertiesInstance();
    File activeConfig = ConfigurationDir.getConfigFile("config.active.properties");
    if (activeConfig == null) {
        return propertiesStream;
    }
    // closeBranch == true: closing the returned stream also closes the
    // mirror file, so the caller only has one stream to manage.
    return new TeeInputStream(propertiesStream, new FileOutputStream(activeConfig), true);
}
From source file:de.uni.stuttgart.informatik.ToureNPlaner.Net.Handler.SyncCoreLoader.java
/**
 * Downloads the core file from the network while simultaneously caching it
 * in the external cache directory.
 * <p>
 * The returned tee stream writes every byte the caller reads into the
 * cache file; closing it (closeBranch == true) also closes the cache file.
 *
 * @return the teed network stream, or {@code null} if the URL was malformed
 * @throws IOException if the connection or the cache file cannot be opened
 */
private InputStream readCoreFileFromNetAndCache() throws IOException {
    HttpURLConnection con = null;
    FileOutputStream coreFileStream = null;
    try {
        URL url = new URL(coreURL + pathPrefix + corePrefix + coreLevel + coreSuffix);
        con = (HttpURLConnection) url.openConnection();

        File cacheDirFile = ToureNPlanerApplication.getContext().getExternalCacheDir();
        Log.d(TAG, "Trying to download core to " + cacheDirFile.getAbsolutePath() + corePrefix + coreLevel
                + coreSuffix);

        coreFileStream = new FileOutputStream(new File(cacheDirFile, corePrefix + coreLevel + coreSuffix));
        Log.d(TAG, "Content-Length: " + con.getContentLength());

        InputStream in = new BufferedInputStream(con.getInputStream());
        return new TeeInputStream(in, coreFileStream, true);
    } catch (MalformedURLException e) {
        // Preserve original behavior: a malformed URL is reported and
        // signalled to the caller by a null return rather than an exception.
        e.printStackTrace();
        closeQuietlyOnFailure(coreFileStream);
        if (con != null) {
            con.disconnect();
        }
        return null;
    } catch (IOException e) {
        // Fix: previously an I/O failure (e.g. in getInputStream()) leaked
        // both the connection and the partially created cache file stream,
        // because only MalformedURLException was caught. Clean up, then
        // rethrow so callers still see the IOException as before.
        closeQuietlyOnFailure(coreFileStream);
        if (con != null) {
            con.disconnect();
        }
        throw e;
    }
}

/** Best-effort close of the cache file stream on a failed download. */
private static void closeQuietlyOnFailure(FileOutputStream stream) {
    if (stream != null) {
        try {
            stream.close();
        } catch (IOException ignored) {
            // already handling a failure; nothing useful to do here
        }
    }
}
From source file:net.solarnetwork.node.backup.ZipStreamBackupResource.java
/**
 * Returns a stream of this backup resource's content.
 * <p>
 * The wrapped zip entry stream can only be consumed once, so the first call
 * tees it into a temp file; subsequent calls read back from that file.
 * Note: the temp file is only complete once the first returned stream has
 * been fully read and closed — presumably callers guarantee that (TODO
 * confirm against call sites).
 *
 * @return a stream over the entry content
 * @throws IOException if the temp file cannot be created or opened
 */
@Override
public InputStream getInputStream() throws IOException {
    // to support calling getInputStream() more than once, tee the input to a temp file
    // the first time, and subsequent times
    if (tempFile != null) {
        return new BufferedInputStream(new FileInputStream(tempFile));
    }
    tempFile = File.createTempFile(entry.getName(), ".tmp");
    final BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(tempFile));
    // closeBranch == false: the tee must not close 'out' itself; instead the
    // FilterInputStream wrapper flushes and closes the temp-file branch when
    // the caller closes the returned stream. The wrapper deliberately does
    // NOT call super.close() — presumably the underlying zip stream is
    // shared across entries and must stay open (NOTE(review): confirm).
    return new TeeInputStream(new FilterInputStream(stream) {
        @Override
        public void close() throws IOException {
            out.flush();
            out.close();
        }
    }, out, false);
}
From source file:net.community.chest.gitcloud.facade.backend.git.BackendReceivePackFactory.java
/**
 * Creates a {@link ReceivePack} for the repository that, when TRACE logging
 * is enabled, wraps the wire-protocol streams with tee streams so the raw
 * client/server/messages traffic is hex-dumped to the logger.
 * <p>
 * NOTE(review): the trace labels say "upload(...)" even though this is the
 * receive path — looks copy-pasted from the upload factory; verify intent
 * before relying on the log output.
 *
 * @param request the transport request (HTTP or otherwise), used only to
 *                build the logging prefix
 * @param db      the repository being pushed into
 * @return the configured {@link ReceivePack}
 */
@Override
public ReceivePack create(C request, Repository db) throws ServiceNotEnabledException, ServiceNotAuthorizedException {
    final String logPrefix;
    if (request instanceof HttpServletRequest) {
        HttpServletRequest req = (HttpServletRequest) request;
        logPrefix = "create(" + req.getMethod() + ")[" + req.getRequestURI() + "][" + req.getQueryString() + "]";
    } else {
        logPrefix = "create(" + db.getDirectory() + ")";
    }
    if (logger.isDebugEnabled()) {
        logger.debug(logPrefix + ": " + db.getDirectory());
    }

    ReceivePack receive = new ReceivePack(db) {
        @Override
        @SuppressWarnings("synthetic-access")
        public void receive(InputStream input, OutputStream output, OutputStream messages) throws IOException {
            InputStream effIn = input;
            OutputStream effOut = output, effMessages = messages;
            if (logger.isTraceEnabled()) {
                // Client -> server bytes: tee into a hex-dump appender.
                // closeBranch == true closes the dump stream with the input.
                LineLevelAppender inputAppender = new LineLevelAppender() {
                    @Override
                    public void writeLineData(CharSequence lineData) throws IOException {
                        logger.trace(logPrefix + " upload(C): " + lineData);
                    }

                    @Override
                    public boolean isWriteEnabled() {
                        return true;
                    }
                };
                effIn = new TeeInputStream(effIn, new HexDumpOutputStream(inputAppender), true);

                // Server -> client bytes: same treatment on the output side.
                LineLevelAppender outputAppender = new LineLevelAppender() {
                    @Override
                    public void writeLineData(CharSequence lineData) throws IOException {
                        logger.trace(logPrefix + " upload(S): " + lineData);
                    }

                    @Override
                    public boolean isWriteEnabled() {
                        return true;
                    }
                };
                effOut = new TeeOutputStream(effOut, new HexDumpOutputStream(outputAppender));

                // Sideband messages channel may be absent.
                if (effMessages != null) {
                    LineLevelAppender messagesAppender = new LineLevelAppender() {
                        @Override
                        public void writeLineData(CharSequence lineData) throws IOException {
                            logger.trace(logPrefix + " upload(M): " + lineData);
                        }

                        @Override
                        public boolean isWriteEnabled() {
                            return true;
                        }
                    };
                    // TODO review the decision to use an AsciiLineOutputStream here
                    effMessages = new TeeOutputStream(effMessages, new AsciiLineOutputStream(messagesAppender));
                }
            }
            super.receive(effIn, effOut, effMessages);
        }
    };
    receive.setTimeout(receiveTimeoutValue);

    // TODO set pushing user identity for reflog
    // receive.setRefLogIdent(new PersonIdent(user.username, user.username + "@" + origin))

    // TODO set advanced options
    // receive.setAllowCreates(user.canCreateRef(repository));
    // receive.setAllowDeletes(user.canDeleteRef(repository));
    // receive.setAllowNonFastForwards(user.canRewindRef(repository));

    // TODO setup the receive hooks
    // receive.setPreReceiveHook(preRcvHook);
    // receive.setPostReceiveHook(postRcvHook);

    return receive;
}
From source file:net.community.chest.gitcloud.facade.backend.git.BackendUploadPackFactory.java
/**
 * Creates an {@link UploadPack} for the repository that, when TRACE logging
 * is enabled, (a) wraps the wire-protocol streams with tee streams that
 * hex-dump the client/server/messages traffic to the logger, and (b) mirrors
 * the advertised-refs packet lines into the log via a second
 * {@link PacketLineOut} writing to a logging stream.
 *
 * @param request the transport request (HTTP or otherwise), used only to
 *                build the logging prefix
 * @param db      the repository being fetched from
 * @return the configured {@link UploadPack}
 */
@Override
public UploadPack create(final C request, Repository db) throws ServiceNotEnabledException, ServiceNotAuthorizedException {
    final File dir = db.getDirectory();
    final String logPrefix;
    if (request instanceof HttpServletRequest) {
        HttpServletRequest req = (HttpServletRequest) request;
        logPrefix = "create(" + req.getMethod() + ")[" + req.getRequestURI() + "][" + req.getQueryString() + "]";
    } else {
        logPrefix = "create(" + dir.getAbsolutePath() + ")";
    }
    if (logger.isDebugEnabled()) {
        logger.debug(logPrefix + ": " + dir.getAbsolutePath());
    }

    UploadPack up = new UploadPack(db) {
        @Override
        @SuppressWarnings("synthetic-access")
        public void upload(InputStream input, OutputStream output, OutputStream messages) throws IOException {
            InputStream effIn = input;
            OutputStream effOut = output, effMessages = messages;
            if (logger.isTraceEnabled()) {
                // Client -> server bytes: tee into a hex-dump appender.
                // closeBranch == true closes the dump stream with the input.
                LineLevelAppender inputAppender = new LineLevelAppender() {
                    @Override
                    public void writeLineData(CharSequence lineData) throws IOException {
                        logger.trace(logPrefix + " upload(C): " + lineData);
                    }

                    @Override
                    public boolean isWriteEnabled() {
                        return true;
                    }
                };
                effIn = new TeeInputStream(effIn, new HexDumpOutputStream(inputAppender), true);

                // Server -> client bytes: same treatment on the output side.
                LineLevelAppender outputAppender = new LineLevelAppender() {
                    @Override
                    public void writeLineData(CharSequence lineData) throws IOException {
                        logger.trace(logPrefix + " upload(S): " + lineData);
                    }

                    @Override
                    public boolean isWriteEnabled() {
                        return true;
                    }
                };
                effOut = new TeeOutputStream(effOut, new HexDumpOutputStream(outputAppender));

                // Sideband messages channel may be absent.
                if (effMessages != null) {
                    LineLevelAppender messagesAppender = new LineLevelAppender() {
                        @Override
                        public void writeLineData(CharSequence lineData) throws IOException {
                            logger.trace(logPrefix + " upload(M): " + lineData);
                        }

                        @Override
                        public boolean isWriteEnabled() {
                            return true;
                        }
                    };
                    // TODO review the decision to use an AsciiLineOutputStream here
                    effMessages = new TeeOutputStream(effMessages, new AsciiLineOutputStream(messagesAppender));
                }
            }
            super.upload(effIn, effOut, effMessages);
        }

        @Override
        @SuppressWarnings("synthetic-access")
        public void sendAdvertisedRefs(RefAdvertiser adv) throws IOException, ServiceMayNotContinueException {
            RefAdvertiser effAdv = adv;
            // Only the packet-line advertiser can be mirrored; the inner
            // PacketLineOut is extracted reflectively (pckOutField) since
            // JGit does not expose it.
            if (logger.isTraceEnabled() && (adv instanceof PacketLineOutRefAdvertiser)) {
                PacketLineOut pckOut = (PacketLineOut) ReflectionUtils.getField(pckOutField, adv);
                effAdv = new PacketLineOutRefAdvertiser(pckOut) {
                    // Shadow writer that duplicates every packet line into the log.
                    private final PacketLineOut pckLog = new PacketLineOut(
                            // TODO review the decision to use an AsciiLineOutputStream here
                            new AsciiLineOutputStream(new LineLevelAppender() {
                                @Override
                                public void writeLineData(CharSequence lineData) throws IOException {
                                    logger.trace(logPrefix + " S: " + lineData);
                                }

                                @Override
                                public boolean isWriteEnabled() {
                                    return true;
                                }
                            }));

                    @Override
                    protected void writeOne(CharSequence line) throws IOException {
                        String s = line.toString();
                        super.writeOne(s);
                        pckLog.writeString(s);
                    }

                    @Override
                    protected void end() throws IOException {
                        super.end();
                        pckLog.end();
                    }
                };
            }
            super.sendAdvertisedRefs(effAdv);
        }
    };
    up.setTimeout(uploadTimeoutValue);
    return up;
}
From source file:ee.ria.xroad.proxy.protocol.ProxyMessageEncoder.java
/**
 * Writes one attachment part into the multipart message, hashing and
 * counting its bytes on the fly.
 * <p>
 * The content stream is teed into a counting stream that feeds the digest
 * calculator, so hash and byte count are produced in a single pass while
 * the multipart encoder consumes the attachment.
 *
 * @param contentType       MIME type of the attachment part
 * @param content           attachment body; fully consumed and closed here
 * @param additionalHeaders extra part headers (may be empty)
 * @throws Exception if encoding, hashing, or signing fails
 */
@Override
public void attachment(String contentType, InputStream content, Map<String, String> additionalHeaders)
        throws Exception {
    log.trace("writeAttachment({})", contentType);

    // Open the nested attachment multipart lazily, on the first attachment.
    if (!inAttachmentPart) {
        mpEncoder.startNested(attachmentBoundary);
        inAttachmentPart = true;
    }

    DigestCalculator digestCalculator = createDigestCalculator(hashAlgoId);
    CountingOutputStream countingStream = new CountingOutputStream(digestCalculator.getOutputStream());
    // closeBranch == true: closing the teed stream also closes the
    // digest/counting branch.
    TeeInputStream teedContent = new TeeInputStream(content, countingStream, true);

    mpEncoder.startPart(contentType, toHeaders(additionalHeaders));
    mpEncoder.write(teedContent);

    attachmentsByteCount += countingStream.getByteCount();

    // Register the attachment hash with the signer under its sequence number.
    signer.addPart(MessageFileNames.attachment(++attachmentNo), hashAlgoId, digestCalculator.getDigest());
}
From source file:net.i2cat.netconf.transport.VirtualTransport.java
/**
 * Starts the background thread that feeds the session input stream into the
 * XML parser, looping until the transport is closed.
 * <p>
 * The netconf end-of-frame delimiter is not valid XML, so each frame ends
 * with a deliberate SAX error; the loop simply restarts the parser for the
 * next frame.
 */
private void startParsing() {

    parserThread = new Thread("Parser") {
        // private Log log = LogFactory.getLog(parserThread.class);

        @Override
        public void run() {
            log.debug("Parsing thread start");
            while (!closed) {
                try {
                    log.debug("Starting parser.");
                    // When response logging is enabled, tee everything read
                    // from the session into the XML log file (closeBranch ==
                    // true closes the log file with the stream).
                    if (sessionContext.isLogRespXML())
                        parser.parse(new InputSource(new TeeInputStream(inStream,
                                new FileOutputStream(sessionContext.getLogFileXML()), true)));
                    else {
                        parser.parse(new InputSource(inStream));
                    }
                } catch (InterruptedIOException ie) {
                    log.warn(
                            "While parsing: Got and InterruptedIOException. If you are closing it may be normal.");
                } catch (IOException e) {
                    log.error("While parsing (IOException): " + e.getMessage());
                    e.printStackTrace();
                } catch (SAXException e) {
                    if (e.getMessage().contentEquals("Content is not allowed in trailing section.")) {
                        log.debug("While parsing: Detected netconf delimiter.");
                        // The non-XML netconf frame delimiter surfaces as this
                        // specific SAX error. Doing nothing here just restarts
                        // the parser on the next loop iteration — this is the
                        // expected end-of-frame path, not a failure.
                    } else {
                        log.error("While parsing (SAXException): " + e.getMessage());
                        e.printStackTrace();
                        disconnect();
                    }
                    log.info("End of parsing.");
                }
                log.debug("Looping");
            }
            log.debug("Parsing thread ended");
        }
    };
    parserThread.start();
}
From source file:eu.itesla_project.histodb.client.impl.HistoDbHttpClientImpl.java
/**
 * Executes an HTTP request against the histo DB, serving from and filling
 * the local cache when one is configured.
 *
 * @param request the HTTP request to execute on a cache miss
 * @param url     the histo DB URL, used as the cache key
 * @return the response data stream (cached or fresh), possibly {@code null}
 * @throws IOException if the cache or the HTTP request fails
 */
private InputStream cachedHttpRequest(HttpUriRequest request, HistoDbUrl url) throws IOException {
    InputStream cached = (cache != null) ? cache.getData(url.format()) : null;
    if (cached != null) {
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("Using cached data for query " + url.format());
        }
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Using cached data for query " + url.prettyFormat());
        }
        return cached;
    }
    // Cache miss (or no cache): fetch over HTTP and, when possible, tee the
    // response into the cache as the caller reads it (closeBranch == true
    // closes the cache writer with the stream).
    InputStream fetched = httpRequest(request, url);
    if (cache != null && fetched != null) {
        fetched = new TeeInputStream(fetched, cache.putData(url.format()), true);
    }
    return fetched;
}
From source file:com.aperigeek.dropvault.web.dao.MongoFileService.java
/**
 * Stores (creates or overwrites) a file resource at the given path for a
 * user, writing the encrypted data file, updating the Mongo metadata, and
 * kicking off asynchronous content extraction/indexing.
 * <p>
 * When no content type is supplied it is sniffed concurrently: the incoming
 * data is teed into a pipe consumed by a {@code ContentDetector} thread
 * while the main flow writes the data file.
 *
 * @param username    owner of the resource
 * @param resource    slash-separated resource path; last segment is the file name
 * @param data        file content; fully consumed here
 * @param length      declared content length stored in metadata
 * @param contentType MIME type, or {@code null} to auto-detect
 * @param password    user password used to encrypt/decrypt the data file
 * @throws ResourceNotFoundException if the parent folder does not exist
 * @throws IOException               if writing the data file fails
 */
public void put(final String username, String resource, InputStream data, long length, String contentType,
        final char[] password) throws ResourceNotFoundException, IOException {
    final String[] path = resource.split("/");
    Resource parent = getResourceAt(getRootFolder(username), Arrays.copyOfRange(path, 0, path.length - 2));

    DBCollection files = mongo.getDataBase().getCollection("files");
    DBCollection contents = mongo.getDataBase().getCollection("contents");

    // Optional concurrent content-type detection via a teed pipe.
    // closeBranch == true: closing the tee also closes the pipe, which
    // lets the detector thread terminate.
    ContentDetector contentDetector = null;
    if (contentType == null) {
        PipedInputStream pipeIn = new PipedInputStream();
        PipedOutputStream pipeOut = new PipedOutputStream(pipeIn);
        TeeInputStream tee = new TeeInputStream(data, pipeOut, true);
        contentDetector = new ContentDetector(path[path.length - 1], pipeIn);
        contentDetector.start();
        data = tee;
    }
    final File dataFile = createDataFile(data, username, password);
    if (contentDetector != null) {
        try {
            contentDetector.join();
            contentType = contentDetector.getContentType();
        } catch (InterruptedException ex) {
            // NOTE(review): the interrupt status is swallowed here; consider
            // calling Thread.currentThread().interrupt() to preserve it.
            Logger.getLogger(MongoFileService.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    Resource child = getChild(parent, path[path.length - 1]);
    if (child != null) {
        // Existing resource: refresh its metadata and repoint its content
        // record at the newly written data file.
        DBObject filter = new BasicDBObject();
        filter.put("_id", child.getId());

        DBObject update = new BasicDBObject("modificationDate", new Date());
        update.put("contentLength", length);
        update.put("contentType", contentType);
        files.update(filter, new BasicDBObject("$set", update));

        contents.update(new BasicDBObject("resource", child.getId()),
                new BasicDBObject("$set", new BasicDBObject("file", dataFile.getAbsolutePath())));
    } else {
        // New resource: insert the file document and its content pointer,
        // then touch the parent's modification date.
        DBObject childObj = new BasicDBObject();
        ObjectId objId = new ObjectId();
        childObj.put("_id", objId);
        childObj.put("user", username);
        childObj.put("name", path[path.length - 1]);
        childObj.put("parent", parent.getId());
        childObj.put("type", Resource.ResourceType.FILE.toString());
        childObj.put("creationDate", new Date());
        childObj.put("modificationDate", new Date());
        childObj.put("contentType", contentType);
        childObj.put("contentLength", length);
        files.insert(childObj);

        DBObject content = new BasicDBObject();
        content.put("resource", objId);
        content.put("file", dataFile.getAbsolutePath());
        contents.insert(content);

        files.update(new BasicDBObject("_id", parent.getId()),
                new BasicDBObject("$set", new BasicDBObject("modificationDate", new Date())));

        child = buildResource(childObj);
    }

    // Fire-and-forget background thread: extract text content from the
    // stored (decrypted) file and (re)index it for search.
    final String fContentType = contentType;
    final Resource fChild = child;
    new Thread() {
        public void run() {
            try {
                Map<String, String> metadata = extractionService.extractContent(path[path.length - 1],
                        readFile(dataFile, username, password), fContentType);

                metadata.put("name", path[path.length - 1]);

                indexService.remove(username, new String(password), fChild.getId().toString());
                indexService.index(username, new String(password), fChild.getId().toString(), metadata);
            } catch (Exception ex) {
                Logger.getLogger(MongoFileService.class.getName()).log(Level.SEVERE,
                        "Index failed for " + path[path.length - 1], ex);
            }
        }
    }.start();
}