List of usage examples for com.google.common.io.Closeables#close(Closeable, boolean)
public static void close(@Nullable Closeable closeable, boolean swallowIOException) throws IOException
From source file:org.apache.mahout.classifier.df.DecisionForest.java
/**
 * Loads a decision forest from a single file, or from every output file in a directory.
 *
 * @param conf hadoop configuration used to resolve the file system
 * @param forestPath path to a forest file, or to a directory of forest part files
 * @return the loaded forest (trees from all part files merged into one)
 * @throws java.io.IOException if the file system or a part file cannot be read
 */
public static DecisionForest load(Configuration conf, Path forestPath) throws IOException {
    FileSystem fs = forestPath.getFileSystem(conf);
    // A directory means the forest was written as several part files.
    Path[] partFiles = fs.getFileStatus(forestPath).isDir()
            ? DFUtils.listOutputFiles(fs, forestPath)
            : new Path[] { forestPath };
    DecisionForest result = null;
    for (Path partFile : partFiles) {
        FSDataInputStream in = new FSDataInputStream(fs.open(partFile));
        try {
            if (result == null) {
                // The first file creates the forest ...
                result = read(in);
            } else {
                // ... subsequent files merge their trees into it.
                result.readFields(in);
            }
        } finally {
            // Swallow close-time IOExceptions; the read itself already succeeded.
            Closeables.close(in, true);
        }
    }
    return result;
}
From source file:com.b2international.snowowl.snomed.reasoner.server.ontology.SnomedOntologyService.java
/** * /* w ww .j a v a 2 s. com*/ * @param ontology * @param exportType * @param outputFile * @throws IOException */ public void saveOntology(final OWLOntology ontology, final SnomedOntologyExportType exportType, final File outputFile) throws IOException { OutputStream outputStream = null; try { final OWLOntologyFormat ontologyFormat = exportType.getFormat(); if (ontologyFormat.isPrefixOWLOntologyFormat()) { ontologyFormat.asPrefixOWLOntologyFormat() .copyPrefixesFrom(SnomedOntologyUtils.createPrefixManager(ontology)); } outputStream = new FileOutputStream(outputFile); outputStream = new BufferedOutputStream(outputStream); manager.saveOntology(ontology, ontologyFormat, outputStream); } catch (final IOException e) { throw new OntologyException(e); } catch (final OWLOntologyStorageException e) { throw new OntologyException(e); } finally { Closeables.close(outputStream, true); } }
From source file:com.google.dart.compiler.backend.js.JavascriptBackend.java
@Override public void packageApp(LibrarySource app, Collection<LibraryUnit> libraries, DartCompilerContext context, CoreTypeProvider typeProvider) throws IOException { List<SourceMapSection> appSections = Lists.newArrayList(); Writer out = context.getArtifactWriter(app, "", EXTENSION_APP_JS); boolean failed = true; try {//from ww w . ja v a 2 s . com // Emit the concatenated Javascript sources in dependency order. packageLibs(out, appSections, context); writeEntryPointCall(getMangledEntryPoint(context), out); failed = false; } finally { Closeables.close(out, failed); } Writer srcMapOut = context.getArtifactWriter(app, "", EXTENSION_APP_JS_SRC_MAP); failed = true; try { // TODO(johnlenz): settle how we want to get a reference to the app // output. Do we want this to be a filename, a URL, both? new GenerateSourceMap().appendIndexMapTo(srcMapOut, app.getName() + "." + EXTENSION_JS, appSections); failed = false; } finally { Closeables.close(srcMapOut, failed); } }
From source file:eu.interedition.text.Text.java
public void read(final XMLStreamWriter xml) throws IOException, XMLStreamException { Preconditions.checkArgument(getType() == Text.Type.XML); Reader xmlStream = null;//from ww w . java 2s . co m XMLEventReader xmlReader = null; XMLEventWriter xmlWriter = null; try { xmlReader = XML_INPUT_FACTORY.createXMLEventReader(xmlStream = read().getInput()); xmlWriter = XML_OUTPUT_FACTORY.createXMLEventWriter(xml); xmlWriter.add(xmlReader); } catch (XMLStreamException e) { throw Throwables.propagate(e); } finally { XML.closeQuietly(xmlWriter); XML.closeQuietly(xmlReader); Closeables.close(xmlStream, false); } }
From source file:org.apache.mahout.math.hadoop.TimesSquaredJob.java
public static Vector retrieveTimesSquaredOutputVector(Path outputVectorTmpPath, Configuration conf) throws IOException { Path outputFile = new Path(outputVectorTmpPath, OUTPUT_VECTOR_FILENAME + "/part-r-00000"); SequenceFileValueIterator<VectorWritable> iterator = new SequenceFileValueIterator<VectorWritable>( outputFile, true, conf);// ww w . j a va 2 s. c o m try { return iterator.next().get(); } finally { Closeables.close(iterator, true); } }
From source file:com.android.tools.internal.artifacts.PomHandler.java
/** * Loads the DOM for a given file and returns a {@link org.w3c.dom.Document} object. * @param file the file to parse/* www . j a va2s . c o m*/ * @return a Document object. * @throws java.io.IOException */ static Document parseDocument(File file) throws IOException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); BufferedInputStream stream = new BufferedInputStream(new FileInputStream(file)); InputSource is = new InputSource(stream); factory.setNamespaceAware(true); factory.setValidating(false); try { DocumentBuilder builder = factory.newDocumentBuilder(); return builder.parse(is); } catch (ParserConfigurationException e) { throw new IOException(e); } catch (SAXException e) { throw new IOException(e); } finally { Closeables.close(stream, true /* swallowIOException */); } }
From source file:org.apache.mahout.utils.vectors.lucene.ClusterLabels.java
public void getLabels() throws IOException { Writer writer;//from w w w.j av a 2 s . c o m if (this.output == null) { writer = new OutputStreamWriter(System.out, Charsets.UTF_8); } else { writer = Files.newWriter(new File(this.output), Charsets.UTF_8); } try { for (Map.Entry<Integer, List<WeightedPropertyVectorWritable>> integerListEntry : clusterIdToPoints .entrySet()) { List<WeightedPropertyVectorWritable> wpvws = integerListEntry.getValue(); List<TermInfoClusterInOut> termInfos = getClusterLabels(integerListEntry.getKey(), wpvws); if (termInfos != null) { writer.write('\n'); writer.write("Top labels for Cluster "); writer.write(String.valueOf(integerListEntry.getKey())); writer.write(" containing "); writer.write(String.valueOf(wpvws.size())); writer.write(" vectors"); writer.write('\n'); writer.write("Term \t\t LLR \t\t In-ClusterDF \t\t Out-ClusterDF "); writer.write('\n'); for (TermInfoClusterInOut termInfo : termInfos) { writer.write(termInfo.getTerm()); writer.write("\t\t"); writer.write(String.valueOf(termInfo.getLogLikelihoodRatio())); writer.write("\t\t"); writer.write(String.valueOf(termInfo.getInClusterDF())); writer.write("\t\t"); writer.write(String.valueOf(termInfo.getOutClusterDF())); writer.write('\n'); } } } } finally { Closeables.close(writer, false); } }
From source file:org.qcri.pca.VarianceJob.java
/**
 * Reads the single (NullWritable, DoubleWritable) record written by the
 * reducer and stores its value in {@code finalSum}.
 *
 * @param outDirPath directory containing the reducer output
 * @param conf hadoop configuration
 * @throws IOException if the sequence file cannot be read or closed
 */
public void loadResult(Path outDirPath, Configuration conf) throws IOException {
    Path resultFile = new Path(outDirPath, "part-r-00000");
    SequenceFileIterator<NullWritable, DoubleWritable> records =
            new SequenceFileIterator<NullWritable, DoubleWritable>(resultFile, true, conf);
    try {
        Pair<NullWritable, DoubleWritable> record = records.next();
        finalSum = record.getSecond().get();
    } finally {
        // swallowIOException = false: close failures propagate to the caller.
        Closeables.close(records, false);
    }
}
From source file:org.apache.mahout.clustering.streaming.mapreduce.StreamingKMeansUtilsMR.java
public static void writeVectorsToSequenceFile(Iterable<? extends Vector> datapoints, Path path, Configuration conf) throws IOException { SequenceFile.Writer writer = null; try {//from w w w.j a va2 s.c o m writer = SequenceFile.createWriter(FileSystem.get(conf), conf, path, IntWritable.class, VectorWritable.class); int i = 0; for (Vector vector : datapoints) { writer.append(new IntWritable(i++), new VectorWritable(vector)); } } finally { Closeables.close(writer, true); } }
From source file:org.apache.mahout.math.hadoop.similarity.cooccurrence.Vectors.java
/**
 * Reads a single {@link Vector} from the file at {@code path}.
 *
 * @param path location of the serialized vector
 * @param conf hadoop configuration used to resolve the file system
 * @return the deserialized vector
 * @throws IOException if the file cannot be opened or read
 */
public static Vector read(Path path, Configuration conf) throws IOException {
    FileSystem fs = FileSystem.get(path.toUri(), conf);
    FSDataInputStream input = fs.open(path);
    try {
        return VectorWritable.readVector(input);
    } finally {
        // Swallow close-time errors; the vector has already been read.
        Closeables.close(input, true);
    }
}