List of usage examples for the org.apache.commons.io.input.CloseShieldInputStream constructor
public CloseShieldInputStream(InputStream in)
From source file:io.personium.core.bar.BarFileReadRunner.java
/**
 * Registers one WebDAV file entry from a bar archive into its parent collection.
 * NOTE(review): original Javadoc was mojibake (likely Japanese); intent reconstructed from the code.
 *
 * @param entryName   entry path inside the bar file
 * @param inputStream stream positioned at this entry's content; shielded from close below
 * @param webdavCols  map from collection path to its {@code DavCmp}
 * @return {@code true} on success, {@code false} when validation or registration fails
 */
protected boolean registWebDavFile(String entryName, InputStream inputStream, Map<String, DavCmp> webdavCols) {
    // Strip the contents-directory prefix to get the file path.
    // NOTE(review): replaceAll treats CONTENTS_DIR as a regex — assumes it has no metacharacters; confirm.
    String filePath = entryName.replaceAll(CONTENTS_DIR, "");
    String colPath = entryName.substring(0, entryName.lastIndexOf("/") + 1);
    // Resolve the parent WebDAV collection for this entry.
    DavCmp parentCmp = webdavCols.get(colPath);
    // Reject the file when the parent collection already holds the maximum number of children.
    int maxChildResource = PersoniumUnitConfig.getMaxChildResourceCount();
    if (parentCmp.getChildrenCount() >= maxChildResource) {
        String message = PersoniumCoreMessageUtils.getMessage("PR400-DV-0007");
        log.info(message);
        writeOutputStream(true, "PL-BI-1004", entryName, message);
        return false;
    }
    // Extract the bare file name (text after the last '/').
    String fileName = "";
    fileName = filePath.substring(filePath.lastIndexOf("/") + 1);
    DavCmp fileCmp = parentCmp.getChild(fileName);
    // Validate the Content-Type recorded for this entry; an unparsable value aborts this entry.
    String contentType = null;
    try {
        contentType = this.davFileMap.get(entryName);
        RuntimeDelegate.getInstance().createHeaderDelegate(MediaType.class).fromString(contentType);
    } catch (Exception e) {
        String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2005");
        log.info(message + ": " + e.getMessage(), e.fillInStackTrace());
        writeOutputStream(true, "PL-BI-1004", entryName, message);
        return false;
    }
    // Store the file body; CloseShieldInputStream keeps the bar archive stream open for later entries.
    try {
        fileCmp.putForCreate(contentType, new CloseShieldInputStream(inputStream));
    } catch (Exception e) {
        String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2004");
        log.info(message + ": " + e.getMessage(), e.fillInStackTrace());
        writeOutputStream(true, "PL-BI-1004", entryName, message);
        return false;
    }
    return true;
}
From source file:com.fujitsu.dc.core.bar.BarFileReadRunner.java
/**
 * Registers one WebDAV file entry from a bar archive into its parent collection (ES-backed variant).
 * NOTE(review): original Javadoc was mojibake (likely Japanese); intent reconstructed from the code.
 *
 * @param entryName   entry path inside the bar file
 * @param inputStream stream positioned at this entry's content; shielded from close below
 * @param webdavCols  map from collection path to its {@code DavCmpEsImpl}
 * @return {@code true} on success, {@code false} when validation or registration fails
 */
protected boolean registWebDavFile(String entryName, InputStream inputStream, Map<String, DavCmpEsImpl> webdavCols) {
    // Strip the contents-directory prefix to get the file path.
    // NOTE(review): replaceAll treats CONTENTS_DIR as a regex — assumes it has no metacharacters; confirm.
    String filePath = entryName.replaceAll(CONTENTS_DIR, "");
    String colPath = entryName.substring(0, entryName.lastIndexOf("/") + 1);
    // Resolve the parent WebDAV collection for this entry.
    DavCmpEsImpl parentCmp = webdavCols.get(colPath);
    String parentId = parentCmp.getId();
    // Reject the file when the parent collection already holds the maximum number of children.
    int maxChildResource = DcCoreConfig.getMaxChildResourceCount();
    if (parentCmp.getChildrenCount() >= maxChildResource) {
        String message = DcCoreMessageUtils.getMessage("PR400-DV-0007");
        log.info(message);
        writeOutputStream(true, "PL-BI-1004", entryName, message);
        return false;
    }
    // Create the DAV file node under the resolved parent.
    DavNode davNode = new DavNode(this.cell.getId(), this.box.getId(), DavCmp.TYPE_DAV_FILE);
    davNode.setParentId(parentId);
    // Extract the bare file name (text after the last '/').
    String fileName = "";
    fileName = filePath.substring(filePath.lastIndexOf("/") + 1);
    String davNodeId = davNode.getId();
    DavCmpEsImpl fileCmp = new DavCmpEsImpl(fileName, parentCmp, this.cell, this.box, davNodeId);
    // Validate the Content-Type recorded for this entry; an unparsable value aborts this entry.
    String contentType = null;
    try {
        contentType = this.davFileMap.get(entryName);
        RuntimeDelegate.getInstance().createHeaderDelegate(MediaType.class).fromString(contentType);
    } catch (Exception e) {
        String message = DcCoreMessageUtils.getMessage("PL-BI-2005");
        log.info(message + ": " + e.getMessage(), e.fillInStackTrace());
        writeOutputStream(true, "PL-BI-1004", entryName, message);
        return false;
    }
    // Store the file body; CloseShieldInputStream keeps the bar archive stream open for later entries.
    try {
        fileCmp.putForCreate(contentType, new CloseShieldInputStream(inputStream));
    } catch (Exception e) {
        String message = DcCoreMessageUtils.getMessage("PL-BI-2004");
        log.info(message + ": " + e.getMessage(), e.fillInStackTrace());
        writeOutputStream(true, "PL-BI-1004", entryName, message);
        return false;
    }
    return true;
}
From source file:com.fujitsu.dc.core.bar.BarFileReadRunner.java
/**
 * Parses the 00_$metadata.xml user schema and registers its OData model elements
 * (complex types, entity types, associations) through the given collection.
 * NOTE(review): original Javadoc was mojibake (likely Japanese); intent reconstructed from the code.
 *
 * @param entryName   entry name, used only for progress/error reporting
 * @param inputStream stream positioned at the metadata XML; shielded from close below
 * @param davCmp      target OData collection component
 * @return {@code true} on success, {@code false} when parsing or registration fails
 */
protected boolean registUserSchema(String entryName, InputStream inputStream, DavCmpEsImpl davCmp) {
    EdmDataServices metadata = null;
    // Parse the EDMX with StAX; the shield keeps the enclosing archive stream open afterwards.
    try {
        // NOTE(review): no explicit charset — decodes with the platform default, bypassing the
        // XML prolog's encoding declaration; confirm inputs are always platform/UTF-8 compatible.
        InputStreamReader isr = new InputStreamReader(new CloseShieldInputStream(inputStream));
        XMLFactoryProvider2 provider = StaxXMLFactoryProvider2.getInstance();
        XMLInputFactory2 factory = provider.newXMLInputFactory2();
        XMLEventReader2 reader = factory.createXMLEventReader(isr);
        DcEdmxFormatParser parser = new DcEdmxFormatParser();
        metadata = parser.parseMetadata(reader);
    } catch (Exception ex) {
        log.info("XMLParseException: " + ex.getMessage(), ex.fillInStackTrace());
        String message = DcCoreMessageUtils.getMessage("PL-BI-2002");
        writeOutputStream(true, "PL-BI-1004", entryName, message);
        return false;
    } catch (StackOverflowError tw) {
        // Deeply nested ComplexType definitions can overflow the parser's stack; report, don't crash.
        log.info("XMLParseException: " + tw.getMessage(), tw.fillInStackTrace());
        String message = DcCoreMessageUtils.getMessage("PL-BI-2002");
        writeOutputStream(true, "PL-BI-1004", entryName, message);
        return false;
    }
    // Register ComplexTypes first so EntityTypes/Properties referencing them resolve.
    try {
        createComplexTypes(metadata, davCmp);
        createEntityTypes(metadata, davCmp);
        createAssociations(metadata, davCmp);
    } catch (DcCoreException e) {
        writeOutputStream(true, "PL-BI-1004", entryName, e.getMessage());
        log.info("DcCoreException: " + e.getMessage());
        return false;
    } catch (Exception e) {
        log.info("Regist Entity Error: " + e.getMessage(), e.fillInStackTrace());
        String message = DcCoreMessageUtils.getMessage("PL-BI-2003");
        writeOutputStream(true, "PL-BI-1004", entryName, message);
        return false;
    }
    return true;
}
From source file:io.personium.core.bar.BarFileReadRunner.java
/**
 * Parses the 00_$metadata.xml user schema and registers its OData model elements
 * (complex types, entity types, associations) through the given collection.
 * NOTE(review): original Javadoc was mojibake (likely Japanese); intent reconstructed from the code.
 *
 * @param entryName   entry name, used only for progress/error reporting
 * @param inputStream stream positioned at the metadata XML; shielded from close below
 * @param davCmp      target OData collection component
 * @return {@code true} on success, {@code false} when parsing or registration fails
 */
protected boolean registUserSchema(String entryName, InputStream inputStream, DavCmp davCmp) {
    EdmDataServices metadata = null;
    // Parse the EDMX with StAX; the shield keeps the enclosing archive stream open afterwards.
    try {
        // NOTE(review): no explicit charset — decodes with the platform default, bypassing the
        // XML prolog's encoding declaration; confirm inputs are always platform/UTF-8 compatible.
        InputStreamReader isr = new InputStreamReader(new CloseShieldInputStream(inputStream));
        XMLFactoryProvider2 provider = StaxXMLFactoryProvider2.getInstance();
        XMLInputFactory2 factory = provider.newXMLInputFactory2();
        XMLEventReader2 reader = factory.createXMLEventReader(isr);
        PersoniumEdmxFormatParser parser = new PersoniumEdmxFormatParser();
        metadata = parser.parseMetadata(reader);
    } catch (Exception ex) {
        log.info("XMLParseException: " + ex.getMessage(), ex.fillInStackTrace());
        String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2002");
        writeOutputStream(true, "PL-BI-1004", entryName, message);
        return false;
    } catch (StackOverflowError tw) {
        // Deeply nested ComplexType definitions can overflow the parser's stack; report, don't crash.
        log.info("XMLParseException: " + tw.getMessage(), tw.fillInStackTrace());
        String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2002");
        writeOutputStream(true, "PL-BI-1004", entryName, message);
        return false;
    }
    // Register ComplexTypes first so EntityTypes/Properties referencing them resolve.
    try {
        createComplexTypes(metadata, davCmp);
        createEntityTypes(metadata, davCmp);
        createAssociations(metadata, davCmp);
    } catch (PersoniumCoreException e) {
        writeOutputStream(true, "PL-BI-1004", entryName, e.getMessage());
        log.info("PersoniumCoreException: " + e.getMessage());
        return false;
    } catch (Exception e) {
        log.info("Regist Entity Error: " + e.getMessage(), e.fillInStackTrace());
        String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2003");
        writeOutputStream(true, "PL-BI-1004", entryName, message);
        return false;
    }
    return true;
}
From source file:net.staticsnow.nexus.repository.apt.internal.hosted.AptHostedFacet.java
@Transactional(retryOn = { ONeedRetryException.class }) public void ingestAsset(Payload body) throws IOException, PGPException { AptFacet aptFacet = getRepository().facet(AptFacet.class); StorageTx tx = UnitOfWork.currentTx(); Bucket bucket = tx.findBucket(getRepository()); ControlFile control = null;//w w w.jav a 2s .co m try (TempStreamSupplier supplier = new TempStreamSupplier(body.openInputStream()); ArArchiveInputStream is = new ArArchiveInputStream(supplier.get())) { ArchiveEntry debEntry; while ((debEntry = is.getNextEntry()) != null) { InputStream controlStream; switch (debEntry.getName()) { case "control.tar": controlStream = new CloseShieldInputStream(is); break; case "control.tar.gz": controlStream = new GZIPInputStream(new CloseShieldInputStream(is)); break; case "control.tar.xz": controlStream = new XZCompressorInputStream(new CloseShieldInputStream(is)); default: continue; } try (TarArchiveInputStream controlTarStream = new TarArchiveInputStream(controlStream)) { ArchiveEntry tarEntry; while ((tarEntry = controlTarStream.getNextEntry()) != null) { if (tarEntry.getName().equals("control") || tarEntry.getName().equals("./control")) { control = new ControlFileParser().parseControlFile(controlTarStream); } } } } if (control == null) { throw new IllegalOperationException("Invalid Debian package supplied"); } String name = control.getField("Package").map(f -> f.value).get(); String version = control.getField("Version").map(f -> f.value).get(); String architecture = control.getField("Architecture").map(f -> f.value).get(); String assetName = name + "_" + version + "_" + architecture + ".deb"; String assetPath = "pool/" + name.substring(0, 1) + "/" + name + "/" + assetName; Content content = aptFacet.put(assetPath, new StreamPayload(() -> supplier.get(), body.getSize(), body.getContentType())); Asset asset = Content.findAsset(tx, bucket, content); String indexSection = buildIndexSection(control, asset.size(), asset.getChecksums(FacetHelper.hashAlgorithms), 
assetPath); asset.formatAttributes().set(P_ARCHITECTURE, architecture); asset.formatAttributes().set(P_PACKAGE_NAME, name); asset.formatAttributes().set(P_PACKAGE_VERSION, version); asset.formatAttributes().set(P_INDEX_SECTION, indexSection); asset.formatAttributes().set(P_ASSET_KIND, "DEB"); tx.saveAsset(asset); List<AssetChange> changes = new ArrayList<>(); changes.add(new AssetChange(AssetAction.ADDED, asset)); for (Asset removed : selectOldPackagesToRemove(name, architecture)) { tx.deleteAsset(removed); changes.add(new AssetChange(AssetAction.REMOVED, removed)); } rebuildIndexesInTransaction(tx, changes.stream().toArray(AssetChange[]::new)); } }
From source file:net.yacy.document.TextParser.java
/**
 * Parses a resource into documents, preferring stream-oriented parsing when safe and
 * falling back to loading the content into a byte array for multi-parser attempts.
 *
 * @param location          URL of the resource, used for parser selection and error reporting
 * @param mimeType          declared MIME type (normalized below)
 * @param charset           declared character set, may be null
 * @param ignore_class_name CSS class names to ignore during scraping
 * @param scraper           vocabulary scraper passed through to parsers
 * @param timezoneOffset    timezone offset for date parsing
 * @param depth             crawl depth of this resource
 * @param contentLength     declared content length, or negative if unknown
 * @param sourceStream      stream with the resource content
 * @param maxLinks          maximum number of links to extract
 * @param maxBytes          maximum number of bytes to parse
 * @return parsed documents
 * @throws Parser.Failure when no parser succeeds or the source cannot be read
 */
private static Document[] parseSource(final DigestURL location, String mimeType, final String charset,
        final Set<String> ignore_class_name, final VocabularyScraper scraper, final int timezoneOffset,
        final int depth, final long contentLength, final InputStream sourceStream, final int maxLinks,
        final long maxBytes) throws Parser.Failure {
    if (AbstractParser.log.isFine())
        AbstractParser.log.fine("Parsing '" + location + "' from stream");
    mimeType = normalizeMimeType(mimeType);
    Set<Parser> idioms = null;
    try {
        idioms = parsers(location, mimeType);
    } catch (final Parser.Failure e) {
        final String errorMsg = "Parser Failure for extension '"
                + MultiProtocolURL.getFileExtension(location.getFileName()) + "' or mimetype '" + mimeType
                + "': " + e.getMessage();
        AbstractParser.log.warn(errorMsg);
        throw new Parser.Failure(errorMsg, location);
    }
    assert !idioms.isEmpty() : "no parsers applied for url " + location.toNormalform(true);
    boolean canStream = false;
    if (idioms.size() == 1) {
        canStream = true;
    } else if (idioms.size() == 2) {
        /* When there are only 2 available parsers, stream oriented parsing can still be applied
         * when one of the 2 parsers is the generic one */
        for (Parser idiom : idioms) {
            if (idiom instanceof genericParser) {
                canStream = true;
            }
        }
    } else if (sourceStream instanceof ByteArrayInputStream) {
        /* Also check if we have a ByteArrayInputStream as source to prevent useless bytes
         * duplication in a new byte array */
        canStream = true;
    }
    // if we do not have more than one non generic parser, or the content size is over MaxInt (2GB),
    // or is over the totally available memory, or stream is already in memory as a ByteArrayInputStream
    // then we use only stream-oriented parser.
    if (canStream || contentLength > Integer.MAX_VALUE || contentLength > MemoryControl.available()) {
        try {
            /* The size of the buffer on the stream must be large enough to allow parser implementations
             * to start parsing the resource and eventually fail, but must also be larger than eventual
             * parsers internal buffers such as BufferedInputStream.DEFAULT_BUFFER_SIZE (8192 bytes) */
            int rewindSize = 10 * 1024;
            final InputStream markableStream;
            if (sourceStream instanceof ByteArrayInputStream) {
                /* No nead to use a wrapping buffered stream when the source is already entirely in memory.
                 * What's more, ByteArrayInputStream has no read limit when marking.*/
                markableStream = sourceStream;
            } else {
                markableStream = new BufferedInputStream(sourceStream, rewindSize);
            }
            /* Mark now to allow resetting the buffered stream to the beginning of the stream */
            markableStream.mark(rewindSize);
            /* Loop on parser : they are supposed to be sorted in order to start with the most specific
             * and end with the most generic */
            for (Parser parser : idioms) {
                /* Wrap in a CloseShieldInputStream to prevent SAX parsers closing the sourceStream
                 * and so let us eventually reuse the same opened stream with other parsers on parser failure */
                CloseShieldInputStream nonCloseInputStream = new CloseShieldInputStream(markableStream);
                try {
                    return parseSource(location, mimeType, parser, charset, ignore_class_name, scraper,
                            timezoneOffset, nonCloseInputStream, maxLinks, maxBytes);
                } catch (Parser.Failure e) {
                    /* Try to reset the marked stream. If the failed parser has consumed too many bytes :
                     * too bad, the marks is invalid and process fails now with an IOException */
                    markableStream.reset();
                    if (parser instanceof gzipParser && e.getCause() instanceof GZIPOpeningStreamException
                            && (idioms.size() == 1 || (idioms.size() == 2 && idioms.contains(genericIdiom)))) {
                        /* The gzip parser failed directly when opening the content stream : before falling
                         * back to the generic parser, let's have a chance to parse the stream as uncompressed. */
                        /* Indeed, this can be a case of misconfigured web server, providing both headers
                         * "Content-Encoding" with value "gzip", and "Content-type" with value such as
                         * "application/gzip". In that case our HTTP client (see GzipResponseInterceptor)
                         * is already uncompressing the stream on the fly, that's why the gzipparser fails
                         * opening the stream.
                         * (see RFC 7231 section 3.1.2.2 for "Content-Encoding" header specification
                         * https://tools.ietf.org/html/rfc7231#section-3.1.2.2)*/
                        gzipParser gzParser = (gzipParser) parser;
                        nonCloseInputStream = new CloseShieldInputStream(markableStream);
                        Document maindoc = gzipParser.createMainDocument(location, mimeType, charset, gzParser);
                        try {
                            Document[] docs = gzParser.parseCompressedInputStream(location, charset,
                                    timezoneOffset, depth, nonCloseInputStream, maxLinks, maxBytes);
                            if (docs != null) {
                                maindoc.addSubDocuments(docs);
                            }
                            return new Document[] { maindoc };
                        } catch (Exception e1) {
                            /* Try again to reset the marked stream if the failed parser has not consumed
                             * too many bytes */
                            markableStream.reset();
                        }
                    }
                }
            }
        } catch (IOException e) {
            throw new Parser.Failure("Error reading source", location);
        }
    }
    // in case that we know more parsers we first transform the content into a byte[] and use that
    // as base for a number of different parse attempts.
    int maxBytesToRead = -1;
    if (maxBytes < Integer.MAX_VALUE) {
        /* Load at most maxBytes + 1 :
         * - to let parsers not supporting Parser.parseWithLimits detect the maxBytes size is exceeded
         *   and end with a Parser.Failure
         * - but let parsers supporting Parser.parseWithLimits perform partial parsing of maxBytes content */
        maxBytesToRead = (int) maxBytes + 1;
    }
    if (contentLength >= 0 && contentLength < maxBytesToRead) {
        maxBytesToRead = (int) contentLength;
    }
    byte[] b = null;
    try {
        b = FileUtils.read(sourceStream, maxBytesToRead);
    } catch (final IOException e) {
        throw new Parser.Failure(e.getMessage(), location);
    }
    Document[] docs = parseSource(location, mimeType, idioms, charset, ignore_class_name, scraper,
            timezoneOffset, depth, b, maxLinks, maxBytes);
    return docs;
}
From source file:nl.ordina.bag.etl.xml.BatchExtractParser.java
/**
 * Parses a BAG extract deelbestand from the given stream and hands the unmarshalled
 * result to the typed parse overload.
 *
 * @param is stream with the extract XML; this method does not close it
 * @throws ParseException   when unmarshalling fails
 * @throws HandlerException when downstream handling fails
 */
public void parse(InputStream is) throws ParseException, HandlerException {
    // Shield the caller's stream so the object builder cannot close it.
    final InputStream shielded = new CloseShieldInputStream(is);
    try {
        final BAGExtractDeelbestandLVC extract = objectBuilder.handle(shielded);
        parse(extract);
    } catch (XMLStreamException | FactoryConfigurationError | JAXBException e) {
        throw new ParseException(e);
    }
}
From source file:nl.ordina.bag.etl.xml.SimpleMutatiesParser.java
/**
 * Parses a BAG mutaties deelbestand from the given stream and hands the unmarshalled
 * result to the typed parse overload.
 *
 * @param is stream with the mutaties XML; this method does not close it
 * @throws ParseException   when unmarshalling fails
 * @throws HandlerException when downstream handling fails
 */
public void parse(InputStream is) throws ParseException, HandlerException {
    try {
        // Shield the caller's stream so the object builder cannot close it.
        parse(objectBuilder.handle(new CloseShieldInputStream(is)));
    } catch (XMLStreamException | FactoryConfigurationError | JAXBException e) {
        throw new ParseException(e);
    }
}
From source file:org.apache.jackrabbit.core.util.db.StreamWrapper.java
/**
 * Returns a view of the wrapped stream whose close() is a no-op, so callers
 * cannot close the underlying stream held by this wrapper.
 *
 * @return a close-shielded view of the underlying input stream
 */
public InputStream getStream() {
    final InputStream shielded = new CloseShieldInputStream(stream);
    return shielded;
}
From source file:org.apache.sling.jcr.contentloader.internal.readers.ZipReader.java
/** * @see org.apache.sling.jcr.contentloader.ContentReader#parse(java.io.InputStream, org.apache.sling.jcr.contentloader.ContentCreator) *///www . j a v a2 s . com public void parse(InputStream ins, ContentCreator creator) throws IOException, RepositoryException { try { creator.createNode(null, NT_FOLDER, null); final ZipInputStream zis = new ZipInputStream(ins); ZipEntry entry; do { entry = zis.getNextEntry(); if (entry != null) { if (!entry.isDirectory()) { String name = entry.getName(); int pos = name.lastIndexOf('/'); if (pos != -1) { creator.switchCurrentNode(name.substring(0, pos), NT_FOLDER); } creator.createFileAndResourceNode(name, new CloseShieldInputStream(zis), null, entry.getTime()); creator.finishNode(); creator.finishNode(); if (pos != -1) { creator.finishNode(); } } zis.closeEntry(); } } while (entry != null); creator.finishNode(); } finally { if (ins != null) { try { ins.close(); } catch (IOException ignore) { } } } }