List of usage examples for com.google.common.io Closeables.close
public static void close(@Nullable Closeable closeable, boolean swallowIOException) throws IOException
From source file:com.ml.ira.algos.TrainLogistic.java
/**
 * Trains an online logistic regression model over {@code inputFile} and writes a
 * human-readable summary of the learned model to {@code output}.
 * <p>
 * Relies on class-level state set up by {@code parseArgs}: {@code lmp}, {@code passes},
 * {@code fieldNames}, {@code inputFile}, {@code outputFile}, {@code scores} and the
 * {@code model} field it assigns — presumably the usual Mahout TrainLogistic statics;
 * TODO confirm against the enclosing class.
 *
 * @param args   command-line arguments; parsed by {@code parseArgs}, which also
 *               decides whether anything runs at all
 * @param output destination for progress, timing and model-summary text
 * @throws Exception on I/O or parsing failures from the underlying readers/writers
 */
static void mainToOutput(String[] args, PrintWriter output) throws Exception {
    if (parseArgs(args)) {
        double logPEstimate = 0; // running estimate of the per-sample log-likelihood
        int samples = 0;         // count of examples with a finite log-likelihood
        System.out.println("fieldNames: " + fieldNames);
        long ts = System.currentTimeMillis();
        CsvRecordFactory csv = lmp.getCsvRecordFactory();
        OnlineLogisticRegression lr = lmp.createRegression();
        for (int pass = 0; pass < passes; pass++) {
            System.out.println("at Round: " + pass);
            BufferedReader in = open(inputFile);
            try {
                // read variable names: from the configured fieldNames if present,
                // otherwise from the file's own header line
                String line;
                if (fieldNames != null && fieldNames.length() > 0) {
                    csv.firstLine(fieldNames);
                } else {
                    csv.firstLine(in.readLine());
                }
                line = in.readLine();
                while (line != null) {
                    // for each new line, get target and predictors
                    Vector input = new RandomAccessSparseVector(lmp.getNumFeatures());
                    int targetValue = csv.processLine(line, input);
                    // check performance while this is still news
                    double logP = lr.logLikelihood(targetValue, input);
                    if (!Double.isInfinite(logP)) {
                        if (samples < 20) {
                            // exact running mean for the first 20 samples
                            logPEstimate = (samples * logPEstimate + logP) / (samples + 1);
                        } else {
                            // then switch to an exponential moving average
                            logPEstimate = 0.95 * logPEstimate + 0.05 * logP;
                        }
                        samples++;
                    }
                    double p = lr.classifyScalar(input);
                    if (scores) {
                        output.printf(Locale.ENGLISH, "%10d %2d %10.2f %2.4f %10.4f %10.4f%n", samples,
                                targetValue, lr.currentLearningRate(), p, logP, logPEstimate);
                    }
                    // now update model
                    lr.train(targetValue, input);
                    line = in.readLine();
                }
            } finally {
                // swallow IOException on close: training output already succeeded
                Closeables.close(in, true);
            }
            // cumulative elapsed time since the start, printed after every pass
            output.println("duration: " + (System.currentTimeMillis() - ts));
        }
        if (outputFile.startsWith("hdfs://")) {
            lmp.saveTo(new Path(outputFile));
        } else {
            OutputStream modelOutput = new FileOutputStream(outputFile);
            try {
                lmp.saveTo(modelOutput);
            } finally {
                // propagate close() failures: a truncated model file must not pass silently
                Closeables.close(modelOutput, false);
            }
        }
        output.println("duration: " + (System.currentTimeMillis() - ts));
        output.println(lmp.getNumFeatures());
        output.println(lmp.getTargetVariable() + " ~ ");
        // render the row-0 weights as a "target ~ w1*v1 + w2*v2" style formula
        String sep = "";
        for (String v : csv.getTraceDictionary().keySet()) {
            double weight = predictorWeight(lr, 0, csv, v);
            if (weight != 0) {
                output.printf(Locale.ENGLISH, "%s%.3f*%s", sep, weight, v);
                sep = " + ";
            }
        }
        output.printf("%n");
        model = lr;
        // per-row dump: named non-zero weights, then the raw beta matrix row
        for (int row = 0; row < lr.getBeta().numRows(); row++) {
            for (String key : csv.getTraceDictionary().keySet()) {
                double weight = predictorWeight(lr, row, csv, key);
                if (weight != 0) {
                    output.printf(Locale.ENGLISH, "%20s %.5f%n", key, weight);
                }
            }
            for (int column = 0; column < lr.getBeta().numCols(); column++) {
                output.printf(Locale.ENGLISH, "%15.9f ", lr.getBeta().get(row, column));
            }
            output.println();
        }
    }
}
From source file:org.asoem.greyfish.utils.persistence.Persisters.java
public static <T> T deserialize(final Persister persister, final InputSupplier<? extends InputStream> inputSupplier, final Class<T> clazz) throws IOException, ClassCastException, ClassNotFoundException { final InputStream input = inputSupplier.getInput(); boolean threw = true; try {//ww w . j a v a 2s . c o m final T object = persister.deserialize(input, clazz); threw = false; return object; } finally { Closeables.close(input, threw); } }
From source file:com.netflix.bdp.s3.S3Util.java
/**
 * Reads every serialized {@link PendingUpload} from the given commits file.
 *
 * @param fs                 filesystem holding the commits file
 * @param pendingCommitsFile path of the file to read
 * @return all pending uploads found in the file, in read order
 * @throws IOException if opening, reading or closing the stream fails
 */
static List<PendingUpload> readPendingCommits(FileSystem fs, Path pendingCommitsFile) throws IOException {
    ObjectInputStream in = new ObjectInputStream(fs.open(pendingCommitsFile));
    List<PendingUpload> commits = Lists.newArrayList();
    // Suppress close() exceptions only when a read failure is already propagating.
    boolean failed = true;
    try {
        for (PendingUpload commit : new ObjectIterator<PendingUpload>(in)) {
            commits.add(commit);
        }
        failed = false;
    } finally {
        Closeables.close(in, failed);
    }
    return commits;
}
From source file:org.semanticweb.owlapi.io.XZStreamDocumentTarget.java
/**
 * Closes the underlying XZ output stream, if any.
 * The field is cleared before closing so a second call is a harmless no-op
 * ({@code Closeables.close} ignores {@code null}).
 */
@Override
public void close() throws Exception {
    final XZOutputStream stream = xzOutputStream;
    xzOutputStream = null;
    // Propagate close() failures (swallowIOException = false): a truncated
    // compressed document must not go unnoticed.
    Closeables.close(stream, false);
}
From source file:com.endpoint.lg.media.service.MPlayerFifoManagedResource.java
@Override public void shutdown() { try {//from w w w . jav a2 s.c om Closeables.close(printStream, true); } catch (IOException e) { log.warn(e); } mplayerFifo.delete(); }
From source file:com.uber.stream.kafka.mirrormaker.manager.reporter.HelixKafkaMirrorMakerMetricsReporter.java
/**
 * Builds the metrics stack for a mirror-maker manager instance: a Codahale
 * {@code MetricRegistry} plus JMX and Graphite reporters, and registers a JVM
 * shutdown hook that closes both reporters.
 * <p>
 * If the environment string cannot be parsed, all fields are set to {@code null}
 * and reporting is disabled entirely.
 *
 * @param config manager configuration supplying environment, instance id,
 *               metrics prefix and Graphite connection details
 */
HelixKafkaMirrorMakerMetricsReporter(ManagerConf config) {
    final String environment = config.getEnvironment();
    final String clientId = config.getManagerInstanceId();
    String[] dcNenv = parse(environment);
    if (dcNenv == null) {
        // Without dc/env components no metric prefix can be built; disable everything.
        LOGGER.error("Error parsing environment info");
        _registry = null;
        _graphiteReporter = null;
        _jmxReporter = null;
        _reporterMetricPrefix = null;
        return;
    }
    // Prefix shape: stats.<dc>.counter.<metricsPrefix>.<env>.<clientId>
    _reporterMetricPrefix = String.format("stats.%s.counter.%s.%s.%s", dcNenv[0], config.getMetricsPrefix(),
            dcNenv[1], clientId);
    LOGGER.info("Reporter Metric Prefix is : " + _reporterMetricPrefix);
    _registry = new MetricRegistry();
    // Fix: these were boxed `final Boolean`, causing pointless autoboxing;
    // primitives are captured by the shutdown hook just as well.
    final boolean enabledGraphiteReporting = true;
    final boolean enabledJmxReporting = true;
    final long graphiteReportFreqSec = 60L;
    // Init jmx reporter
    if (enabledJmxReporting) {
        _jmxReporter = JmxReporter.forRegistry(this._registry).build();
        _jmxReporter.start();
    } else {
        _jmxReporter = null;
    }
    // Init graphite reporter
    if (enabledGraphiteReporting) {
        Graphite graphite = getGraphite(config);
        if (graphite == null) {
            _graphiteReporter = null;
        } else {
            _graphiteReporter = GraphiteReporter.forRegistry(_registry).prefixedWith(_reporterMetricPrefix)
                    .convertRatesTo(TimeUnit.SECONDS).convertDurationsTo(TimeUnit.MILLISECONDS)
                    .filter(MetricFilter.ALL).build(graphite);
            _graphiteReporter.start(graphiteReportFreqSec, TimeUnit.SECONDS);
        }
    }  else {
        _graphiteReporter = null;
    }
    Runtime.getRuntime().addShutdownHook(new Thread() {
        @Override // fix: annotation was missing on the overriding run()
        public void run() {
            try {
                // Closeables.close tolerates null, so a disabled/failed reporter is safe here.
                if (enabledJmxReporting) {
                    Closeables.close(_jmxReporter, true);
                }
                if (enabledGraphiteReporting) {
                    Closeables.close(_graphiteReporter, true);
                }
            } catch (Exception e) {
                LOGGER.error("Error while closing Jmx and Graphite reporters.", e);
            }
        }
    });
}
From source file:com.imminentmeals.android.base.utilities.database.ActiveRecord.java
public void reload() { if (_id == 0) return;//from w ww. j a v a 2 s.c o m Cursor cursor = null; try { cursor = getContentResolver().query(_content_uri.buildUpon().appendPath(Long.toString(_id)).build(), projection(), null, null, null); if (cursor.moveToFirst()) { setPropertiesFromCursor(cursor); makeDirty(false); } } finally { try { Closeables.close(cursor, true); } catch (IOException _) { } } }
From source file:com.b2international.snowowl.snomed.importer.rf2.net4j.SnomedSubsetImportIndication.java
/**
 * Reads a SNOMED subset import request from the wire and spools the attached
 * file payload into a temp file.
 * <p>
 * NOTE(review): the sequence of reads below must exactly mirror the writer's
 * field order on the requesting side — do not reorder.
 *
 * @param in      stream carrying the request fields followed by the file bytes
 * @param monitor progress monitor; begun with the payload size and always done
 * @throws Exception if reading the request or writing the temp file fails
 */
@Override
protected void indicating(final ExtendedDataInputStream in, final OMMonitor monitor) throws Exception {
    //e.g.: MAIN, or MAIN/NRC_1/TASK_300
    branchPath = in.readUTF();
    //read requesting user ID
    userId = in.readUTF();
    hasHeader = in.readBoolean();
    skipEmptyLines = in.readBoolean();
    idColumnNumber = in.readInt();
    firstConceptRowNumber = in.readInt();
    sheetNumber = in.readInt();
    refSetParent = in.readUTF();
    subsetName = in.readUTF();
    fileExtension = in.readUTF();
    effectiveTime = in.readUTF();
    namespace = in.readUTF();
    moduleId = in.readUTF();
    languageRefSetId = in.readUTF();
    fieldSeparator = in.readUTF();
    quoteCharacter = in.readUTF();
    lineFeedCharacter = in.readUTF();
    // remaining payload: total byte count, then the original file name
    long size = in.readLong();
    final String fileName = in.readUTF();
    monitor.begin(size);
    BufferedOutputStream out = null;
    file = new File(TEMP_DIR, fileName);
    try {
        out = new BufferedOutputStream(new FileOutputStream(file));
        // copy the payload chunk by chunk, ticking the monitor as we go;
        // `size` counts down to zero when the whole file has arrived
        while (size != 0L) {
            final byte[] buffer = in.readByteArray();
            final int chunk = buffer.length;
            monitor.worked(chunk);
            out.write(buffer);
            size -= chunk;
        }
    } finally {
        monitor.done();
        // out may still be null if FileOutputStream threw; Closeables handles null,
        // and swallowIOException = true keeps close errors from masking the real failure
        Closeables.close(out, true);
    }
}
From source file:org.apache.mahout.classifier.df.DFUtils.java
/**
 * Serializes a Hadoop {@link Writable} to the given path, creating (or
 * overwriting) the file on whatever filesystem the path resolves to.
 *
 * @param conf     Hadoop configuration used to resolve the filesystem
 * @param path     destination file
 * @param writable value to serialize
 * @throws IOException if creating, writing or closing the file fails
 */
public static void storeWritable(Configuration conf, Path path, Writable writable) throws IOException {
    final FileSystem fs = path.getFileSystem(conf);
    final FSDataOutputStream stream = fs.create(path);
    try {
        writable.write(stream);
    } finally {
        // Propagate close() failures (swallowIOException = false): an
        // incompletely flushed file must surface as an error to the caller.
        Closeables.close(stream, false);
    }
}
From source file:haflow.component.mahout.logistic.TrainLogistic.java
/**
 * HDFS-backed variant of Mahout's TrainLogistic driver: trains an online
 * logistic regression model from {@code inputFile} on HDFS and writes the
 * model summary to {@code inforFile}, also on HDFS.
 * <p>
 * Relies on class-level state set up by {@code parseArgs}: {@code lmp},
 * {@code passes}, {@code inputFile}, {@code outputFile}, {@code inforFile},
 * {@code scores} and the {@code model} field it assigns.
 * <p>
 * NOTE(review): {@code output} (and the underlying stream {@code o}) is only
 * closed on the success path; an exception mid-training leaks the HDFS writer.
 * Consider try-with-resources — left as-is here.
 *
 * @param args command-line arguments, parsed by {@code parseArgs}
 * @throws Exception on I/O or parsing failures
 */
static void mainToOutput(String[] args) throws Exception {
    if (parseArgs(args)) {
        double logPEstimate = 0; // running estimate of the per-sample log-likelihood
        int samples = 0;         // count of examples with a finite log-likelihood
        OutputStream o = HdfsUtil.writeHdfs(inforFile);
        PrintWriter output = new PrintWriter(o, true);
        CsvRecordFactory csv = lmp.getCsvRecordFactory();
        OnlineLogisticRegression lr = lmp.createRegression();
        for (int pass = 0; pass < passes; pass++) {
            BufferedReader in = new BufferedReader(new InputStreamReader(HdfsUtil.open(inputFile)));
            try {
                // read variable names
                csv.firstLine(in.readLine());
                String line = in.readLine();
                while (line != null) {
                    // for each new line, get target and predictors
                    Vector input = new RandomAccessSparseVector(lmp.getNumFeatures());
                    int targetValue = csv.processLine(line, input);
                    // check performance while this is still news
                    double logP = lr.logLikelihood(targetValue, input);
                    if (!Double.isInfinite(logP)) {
                        if (samples < 20) {
                            // exact running mean for the first 20 samples
                            logPEstimate = (samples * logPEstimate + logP) / (samples + 1);
                        } else {
                            // then switch to an exponential moving average
                            logPEstimate = 0.95 * logPEstimate + 0.05 * logP;
                        }
                        samples++;
                    }
                    double p = lr.classifyScalar(input);
                    if (scores) {
                        output.printf(Locale.ENGLISH, "%10d %2d %10.2f %2.4f %10.4f %10.4f%n", samples,
                                targetValue, lr.currentLearningRate(), p, logP, logPEstimate);
                    }
                    // now update model
                    lr.train(targetValue, input);
                    line = in.readLine();
                }
            } finally {
                // swallow IOException on close: the pass already completed
                Closeables.close(in, true);
            }
        }
        // model goes to HDFS rather than the local filesystem in this variant
        OutputStream modelOutput = HdfsUtil.writeHdfs(outputFile);
        try {
            lmp.saveTo(modelOutput);
        } finally {
            // propagate close() failures: a truncated model file must not pass silently
            Closeables.close(modelOutput, false);
        }
        output.println(lmp.getNumFeatures());
        output.println(lmp.getTargetVariable() + " ~ ");
        // render the row-0 weights as a "target ~ w1*v1 + w2*v2" style formula
        String sep = "";
        for (String v : csv.getTraceDictionary().keySet()) {
            double weight = predictorWeight(lr, 0, csv, v);
            if (weight != 0) {
                output.printf(Locale.ENGLISH, "%s%.3f*%s", sep, weight, v);
                sep = " + ";
            }
        }
        output.printf("%n");
        model = lr;
        // per-row dump: named non-zero weights, then the raw beta matrix row
        for (int row = 0; row < lr.getBeta().numRows(); row++) {
            for (String key : csv.getTraceDictionary().keySet()) {
                double weight = predictorWeight(lr, row, csv, key);
                if (weight != 0) {
                    output.printf(Locale.ENGLISH, "%20s %.5f%n", key, weight);
                }
            }
            for (int column = 0; column < lr.getBeta().numCols(); column++) {
                output.printf(Locale.ENGLISH, "%15.9f ", lr.getBeta().get(row, column));
            }
            output.println();
        }
        output.close();
    }
}