List of usage examples for com.google.common.io Closeables closeQuietly
public static void closeQuietly(@Nullable Reader reader)
From source file:interactivespaces.workbench.project.jdom.JdomReader.java
/** * Get the root element for a given input file. * * @param inputFile/*from w w w.j ava2s .co m*/ * input project file * * @return top-level element */ Element getRootElement(File inputFile) { Document doc; FileInputStream inputStream = null; try { inputStream = new FileInputStream(inputFile); SAXBuilder builder = new SAXBuilder(); doc = builder.build(inputStream); } catch (Exception e) { throw new InteractiveSpacesException( String.format("Exception while processing %s", inputFile.getAbsolutePath()), e); } finally { Closeables.closeQuietly(inputStream); } return doc.getRootElement(); }
From source file:org.jclouds.encryption.bouncycastle.BouncyCastleEncryptionService.java
public byte[] md5(InputStream toEncode) { MD5Digest eTag = new MD5Digest(); byte[] resBuf = new byte[eTag.getDigestSize()]; byte[] buffer = new byte[1024]; int numRead = -1; try {//from w ww . j a v a 2 s .c o m do { numRead = toEncode.read(buffer); if (numRead > 0) { eTag.update(buffer, 0, numRead); } } while (numRead != -1); } catch (IOException e) { throw new RuntimeException(e); } finally { Closeables.closeQuietly(toEncode); } eTag.doFinal(resBuf, 0); return resBuf; }
From source file:co.cask.cdap.data.stream.service.upload.FileContentWriter.java
/**
 * Aborts the upload: closes the underlying writer and recursively deletes the
 * directory containing the partially written event file. Both operations are
 * best-effort — failures are suppressed ("quietly").
 */
@Override
public void cancel() {
    Closeables.closeQuietly(writer);
    // NOTE(review): deletes the event file's PARENT directory recursively —
    // presumably a per-upload staging directory; confirm against the writer setup.
    Locations.deleteQuietly(Locations.getParent(eventFile), true);
}
From source file:de.blizzy.documentr.search.GetSearchHitTask.java
/**
 * Loads the indexed document for {@code docId} and builds a {@link SearchHit}
 * whose text is an HTML-highlighted fragment of the page matching the query.
 *
 * @return the search hit (highlighted text may be empty if highlighting fails)
 * @throws IOException if the index document cannot be read
 */
@Override
public SearchHit call() throws IOException {
    // Highlighter wraps matched query terms in <strong> and HTML-escapes the rest.
    Formatter formatter = new SimpleHTMLFormatter("<strong>", "</strong>"); //$NON-NLS-1$ //$NON-NLS-2$
    Scorer scorer = new QueryScorer(query);
    Highlighter highlighter = new Highlighter(formatter, scorer);
    highlighter.setTextFragmenter(new SimpleFragmenter(FRAGMENT_SIZE));
    highlighter.setEncoder(new SimpleHTMLEncoder());
    // Pull the stored fields out of the index document.
    Document doc = reader.document(docId);
    String projectName = doc.get(PageIndex.PROJECT);
    String branchName = doc.get(PageIndex.BRANCH);
    String path = doc.get(PageIndex.PATH);
    String title = doc.get(PageIndex.TITLE);
    String text = doc.get(PageIndex.TEXT);
    String[] tagsArray = doc.getValues(PageIndex.TAG);
    List<String> tags = Lists.newArrayList(tagsArray);
    Collections.sort(tags);
    TokenStream tokenStream = null;
    String highlightedText = StringUtils.EMPTY;
    try {
        tokenStream = TokenSources.getAnyTokenStream(reader, docId, PageIndex.TEXT, doc, analyzer);
        String[] fragments = highlighter.getBestFragments(tokenStream, text, NUM_FRAGMENTS);
        cleanupFragments(fragments);
        highlightedText = Util.join(fragments, " <strong>...</strong> "); //$NON-NLS-1$
    } catch (InvalidTokenOffsetsException e) {
        // ignore — token offsets out of sync with stored text; fall back to no highlight
    } finally {
        Closeables.closeQuietly(tokenStream);
    }
    return new SearchHit(projectName, branchName, path, title, highlightedText, tags);
}
From source file:com.google.openrtb.json.OpenRtbNativeJsonReader.java
/** * Desserializes a {@link NativeRequest} from JSON, streamed from an {@link InputStream}. *///from w w w. j a v a2 s. c o m public NativeRequest readNativeRequest(InputStream is) throws IOException { try { return ProtoUtils.built(readNativeRequest(factory().getJsonFactory().createParser(is))); } finally { Closeables.closeQuietly(is); } }
From source file:com.metamx.druid.loading.S3SegmentPusher.java
@Override public DataSegment push(File file, DataSegment segment) throws IOException { log.info("Uploading [%s] to S3", file); String outputKey = JOINER.join(config.getBaseKey().isEmpty() ? null : config.getBaseKey(), segment.getDataSource(),//from w w w. ja v a2 s .c om String.format("%s_%s", segment.getInterval().getStart(), segment.getInterval().getEnd()), segment.getVersion(), segment.getShardSpec().getPartitionNum()); File indexFilesDir = file; long indexSize = 0; final File zipOutFile = File.createTempFile("druid", "index.zip"); ZipOutputStream zipOut = null; try { zipOut = new ZipOutputStream(new FileOutputStream(zipOutFile)); File[] indexFiles = indexFilesDir.listFiles(); for (File indexFile : indexFiles) { log.info("Adding indexFile[%s] with size[%,d]. Total size[%,d]", indexFile, indexFile.length(), indexSize); if (indexFile.length() >= Integer.MAX_VALUE) { throw new ISE("indexFile[%s] too large [%,d]", indexFile, indexFile.length()); } zipOut.putNextEntry(new ZipEntry(indexFile.getName())); IOUtils.copy(new FileInputStream(indexFile), zipOut); indexSize += indexFile.length(); } } finally { Closeables.closeQuietly(zipOut); } try { S3Object toPush = new S3Object(zipOutFile); final String outputBucket = config.getBucket(); toPush.setBucketName(outputBucket); toPush.setKey(outputKey + "/index.zip"); log.info("Pushing %s.", toPush); s3Client.putObject(outputBucket, toPush); DataSegment outputSegment = segment.withSize(indexSize).withLoadSpec(ImmutableMap .<String, Object>of("type", "s3_zip", "bucket", outputBucket, "key", toPush.getKey())); File descriptorFile = File.createTempFile("druid", "descriptor.json"); StreamUtils.copyToFileAndClose(new ByteArrayInputStream(jsonMapper.writeValueAsBytes(segment)), descriptorFile); S3Object descriptorObject = new S3Object(descriptorFile); descriptorObject.setBucketName(outputBucket); descriptorObject.setKey(outputKey + "/descriptor.json"); log.info("Pushing %s", descriptorObject); s3Client.putObject(outputBucket, 
descriptorObject); log.info("Deleting Index File[%s]", indexFilesDir); FileUtils.deleteDirectory(indexFilesDir); log.info("Deleting zipped index File[%s]", zipOutFile); zipOutFile.delete(); log.info("Deleting descriptor file[%s]", descriptorFile); descriptorFile.delete(); return outputSegment; } catch (NoSuchAlgorithmException e) { throw new IOException(e); } catch (S3ServiceException e) { throw new IOException(e); } }
From source file:com.cloudera.cdk.morphline.stdio.AbstractParser.java
@Override protected boolean doProcess(Record record) { if (!hasAtLeastOneAttachment(record)) { return false; }// w w w . j av a 2s . com // TODO: make field for stream configurable String streamMediaType = (String) record.getFirstValue(Fields.ATTACHMENT_MIME_TYPE); if (!isMimeTypeSupported(streamMediaType, record)) { return false; } InputStream stream = getAttachmentInputStream(record); try { return doProcess(record, stream); } catch (IOException e) { throw new MorphlineRuntimeException(e); } finally { Closeables.closeQuietly(stream); } }
From source file:com.netflix.curator.x.discovery.details.ServiceCacheImpl.java
/**
 * Stops the service cache exactly once: detaches every registered cache
 * listener from the client's connection-state listenable, clears the listener
 * list, closes the underlying cache, and notifies the discovery instance.
 *
 * @throws IOException declared by {@link java.io.Closeable}
 * @throws IllegalStateException if never started or already closed
 */
@Override
public void close() throws IOException {
    // CAS from STARTED to STOPPED makes close idempotent-hostile by design:
    // a second call (or a call before start) fails fast.
    Preconditions.checkState(state.compareAndSet(State.STARTED, State.STOPPED),
            "Already closed or has not been started");
    listenerContainer.forEach(new Function<ServiceCacheListener, Void>() {
        @Override
        public Void apply(ServiceCacheListener listener) {
            // Each cache listener was also registered for connection-state events; undo that.
            discovery.getClient().getConnectionStateListenable().removeListener(listener);
            return null;
        }
    });
    listenerContainer.clear();
    Closeables.closeQuietly(cache);
    discovery.cacheClosed(this);
}
From source file:com.netflix.curator.framework.imps.CuratorTempFrameworkImpl.java
/**
 * Tears down the temporary framework's resources: shuts down the cleanup
 * executor and closes the wrapped client. Each field is nulled after teardown
 * so repeated calls are no-ops. Synchronized to serialize with concurrent
 * open/close paths.
 */
private synchronized void closeClient() {
    if (cleanup != null) {
        cleanup.shutdownNow();
        cleanup = null;
    }
    if (client != null) {
        Closeables.closeQuietly(client);
        client = null;
    }
}
From source file:org.fusesource.process.manager.support.JarInstaller.java
/**
 * Downloads the artifact at the given URL into a fresh temporary {@code .jar} file.
 *
 * @param url artifact location to download from
 * @return temporary file containing the downloaded bytes
 * @throws IOException if the temp file cannot be created or the download fails
 */
private File getArtifactFile(URL url) throws IOException {
    File tmpFile = File.createTempFile("artifact", ".jar");
    // FIX: the original caught Exception broadly and called Throwables.propagate(ex)
    // (discarding its result and relying on propagate's always-throw behavior), which
    // wrapped IOExceptions in RuntimeException even though this method already
    // declares throws IOException. try-with-resources closes the stream on every
    // path, replacing the deprecated Closeables.closeQuietly, and lets IOException
    // propagate as declared.
    try (FileOutputStream fos = new FileOutputStream(tmpFile)) {
        Resources.copy(url, fos);
    }
    return tmpFile;
}