List of usage examples for com.google.common.io Closer create
public static Closer create()
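Creates a new Closer. For reference before the project-specific examples below, here is a minimal sketch of the usage pattern the Guava Javadoc recommends for a Closer obtained from create(); the class name and file paths are illustrative placeholders, not part of any example on this page.

import com.google.common.io.ByteStreams;
import com.google.common.io.Closer;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public class CloserCreateExample {
  public static void copy(String from, String to) throws IOException {
    // Closer.create() returns a new, empty Closer.
    Closer closer = Closer.create();
    try {
      // register() tracks each resource; close() later closes them in reverse order.
      InputStream in = closer.register(new FileInputStream(from));
      OutputStream out = closer.register(new FileOutputStream(to));
      ByteStreams.copy(in, out);
    } catch (Throwable t) {
      // rethrow() records the primary exception so exceptions from close() are suppressed.
      throw closer.rethrow(t);
    } finally {
      closer.close();
    }
  }
}

The examples that follow show the same create/register/close idiom (and, in several cases, rethrow) in real projects.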
From source file:gobblin.runtime.JobLauncherTestHelper.java
public void runTestWithPullLimit(Properties jobProps, long limit) throws Exception {
  String jobName = jobProps.getProperty(ConfigurationKeys.JOB_NAME_KEY);
  String jobId = JobLauncherUtils.newJobId(jobName).toString();
  jobProps.setProperty(ConfigurationKeys.JOB_ID_KEY, jobId);

  Closer closer = Closer.create();
  try {
    JobLauncher jobLauncher = closer.register(JobLauncherFactory.newJobLauncher(this.launcherProps, jobProps));
    jobLauncher.launchJob(null);
  } finally {
    closer.close();
  }

  List<JobState.DatasetState> datasetStateList = this.datasetStateStore.getAll(jobName, jobId + ".jst");
  DatasetState datasetState = datasetStateList.get(0);

  Assert.assertEquals(datasetState.getState(), JobState.RunningState.COMMITTED);
  Assert.assertEquals(datasetState.getCompletedTasks(), 4);
  Assert.assertEquals(datasetState.getJobFailures(), 0);

  for (TaskState taskState : datasetState.getTaskStates()) {
    Assert.assertEquals(taskState.getWorkingState(), WorkUnitState.WorkingState.COMMITTED);
    Assert.assertEquals(taskState.getPropAsLong(ConfigurationKeys.EXTRACTOR_ROWS_EXTRACTED), limit);
    Assert.assertEquals(taskState.getPropAsLong(ConfigurationKeys.WRITER_ROWS_WRITTEN), limit);
  }
}
From source file:com.moz.fiji.schema.impl.hbase.HBasePagedFijiResult.java
/**
 * Create a new {@link HBasePagedFijiResult}.
 *
 * @param entityId EntityId of the row from which to read cells.
 * @param dataRequest FijiDataRequest defining the values to retrieve.
 * @param table The table being viewed.
 * @param layout The layout of the table.
 * @param columnTranslator A column name translator for the table.
 * @param decoderProvider A cell decoder provider for the table.
 */
public HBasePagedFijiResult(final EntityId entityId, final FijiDataRequest dataRequest,
    final HBaseFijiTable table, final FijiTableLayout layout,
    final HBaseColumnNameTranslator columnTranslator, final CellDecoderProvider decoderProvider) {
  mEntityId = entityId;
  mDataRequest = dataRequest;
  mLayout = layout;
  mColumnTranslator = columnTranslator;
  mDecoderProvider = decoderProvider;
  mTable = table;
  mCloser = Closer.create();

  final ImmutableSortedMap.Builder<FijiColumnName, Iterable<FijiCell<T>>> columnResults =
      ImmutableSortedMap.naturalOrder();

  for (Column columnRequest : mDataRequest.getColumns()) {
    final PagedColumnIterable columnIterable = new PagedColumnIterable(columnRequest);
    mCloser.register(columnIterable);
    columnResults.put(columnRequest.getColumnName(), columnIterable);
  }

  mColumnResults = columnResults.build();
}
From source file:tachyon.shell.TfsShell.java
public TfsShell(TachyonConf tachyonConf) {
  mTachyonConf = tachyonConf;
  mCloser = Closer.create();
  mTfs = TachyonFileSystemFactory.get();
}
From source file:com.android.build.gradle.internal.transforms.JarMerger.java
private void addFolder(@NonNull File folder, @NonNull String path, boolean removeEntryTimestamp)
    throws IOException, ZipAbortException {
  logger.verbose("addFolder(%1$s, %2$s)", folder, path);
  File[] files = folder.listFiles();
  if (files != null) {
    for (File file : files) {
      if (file.isFile()) {
        String entryPath = path + file.getName();
        if (filter == null || filter.checkEntry(entryPath)) {
          logger.verbose("addFolder(%1$s, %2$s): entry %3$s", folder, path, entryPath);
          if (typedefRemover != null && typedefRemover.isRemoved(entryPath)) {
            continue;
          }

          // new entry
          final JarEntry jarEntry = new JarEntry(entryPath);
          if (removeEntryTimestamp) {
            jarEntry.setLastModifiedTime(ZERO_TIME);
            jarEntry.setLastAccessTime(ZERO_TIME);
            jarEntry.setCreationTime(ZERO_TIME);
          }
          jarOutputStream.putNextEntry(jarEntry);

          // put the file content
          try (Closer localCloser = Closer.create()) {
            InputStream fis = localCloser.register(new FileInputStream(file));
            if (typedefRemover != null) {
              fis = typedefRemover.filter(entryPath, fis);
              assert fis != null; // because we checked isRemoved above
            }
            int count;
            while ((count = fis.read(buffer)) != -1) {
              jarOutputStream.write(buffer, 0, count);
            }
          }

          // close the entry
          jarOutputStream.closeEntry();
        }
      } else if (file.isDirectory()) {
        addFolder(file, path + file.getName() + "/", removeEntryTimestamp);
      }
    }
  }
}
From source file:org.apache.jackrabbit.oak.run.SegmentUtils.java
static void backup(File source, File target) throws IOException {
  Closer closer = Closer.create();
  try {
    FileStore fs;
    if (FileStoreBackup.USE_FAKE_BLOBSTORE) {
      fs = openReadOnlyFileStore(source, newBasicReadOnlyBlobStore());
    } else {
      fs = openReadOnlyFileStore(source);
    }
    closer.register(asCloseable(fs));
    NodeStore store = SegmentNodeStore.builder(fs).build();
    FileStoreBackup.backup(store, target);
  } catch (Throwable e) {
    throw closer.rethrow(e);
  } finally {
    closer.close();
  }
}
From source file:alluxio.cli.ConfigurationDocGenerator.java
/**
 * Writes description of property key to yml files.
 *
 * @param defaultKeys Collection which is from PropertyKey DEFAULT_KEYS_MAP.values()
 * @param filePath path for the generated yml files
 */
static void writeYMLFile(Collection<? extends PropertyKey> defaultKeys, String filePath) throws IOException {
  if (defaultKeys.size() == 0) {
    return;
  }

  FileWriter fileWriter;
  Closer closer = Closer.create();
  String[] fileNames = { "user-configuration.yml", "master-configuration.yml", "worker-configuration.yml",
      "security-configuration.yml", "key-value-configuration.yml", "common-configuration.yml",
      "cluster-management-configuration.yml" };

  try {
    // HashMap for FileWriter per each category
    Map<String, FileWriter> fileWriterMap = new HashMap<>();
    for (String fileName : fileNames) {
      fileWriter = new FileWriter(PathUtils.concatPath(filePath, fileName));
      // put fileWriter
      String key = fileName.substring(0, fileName.indexOf("configuration") - 1);
      fileWriterMap.put(key, fileWriter);
      // register file writer
      closer.register(fileWriter);
    }

    // Sort defaultKeys
    List<PropertyKey> dfkeys = new ArrayList<>(defaultKeys);
    Collections.sort(dfkeys);

    for (PropertyKey iteratorPK : dfkeys) {
      String pKey = iteratorPK.toString();

      // Puts descriptions in single quotes to avoid having to escape reserved characters.
      // Still needs to escape single quotes with double single quotes.
      String description = iteratorPK.getDescription().replace("'", "''");

      // Write property key and default value to yml files
      if (iteratorPK.isIgnoredSiteProperty()) {
        description += " Note: This property must be specified as a JVM property; "
            + "it is not accepted in alluxio-site.properties.";
      }
      String keyValueStr = pKey + ":\n '" + description + "'\n";
      if (pKey.startsWith("alluxio.user.")) {
        fileWriter = fileWriterMap.get("user");
      } else if (pKey.startsWith("alluxio.master.")) {
        fileWriter = fileWriterMap.get("master");
      } else if (pKey.startsWith("alluxio.worker.")) {
        fileWriter = fileWriterMap.get("worker");
      } else if (pKey.startsWith("alluxio.security.")) {
        fileWriter = fileWriterMap.get("security");
      } else if (pKey.startsWith("alluxio.keyvalue.")) {
        fileWriter = fileWriterMap.get("key-value");
      } else if (pKey.startsWith("alluxio.integration.")) {
        fileWriter = fileWriterMap.get("cluster-management");
      } else {
        fileWriter = fileWriterMap.get("common");
      }
      fileWriter.append(keyValueStr);
    }

    LOG.info("YML files for description of Property Keys were created successfully.");
  } catch (Exception e) {
    throw closer.rethrow(e);
  } finally {
    try {
      closer.close();
    } catch (IOException e) {
      LOG.error("Error while flushing/closing YML files for description of Property Keys "
          + "FileWriter", e);
    }
  }
}
From source file:org.pantsbuild.testproject.annotation.processor.ResourceMappingProcessor.java
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
  if (roundEnv.processingOver()) {
    return false;
  }

  FileObject outputFile = createResourceOrDie("" /* no package */, REPORT_FILE_NAME);
  Closer closer = Closer.create();
  try {
    Set<String> typeNames = new HashSet<String>();
    PrintWriter writer = closer.register(new PrintWriter(outputFile.openWriter()));
    for (TypeElement appAnnotation : annotations) {
      Set<? extends Element> annotatedElements = roundEnv.getElementsAnnotatedWith(appAnnotation);
      Set<TypeElement> elements = ElementFilter.typesIn(annotatedElements);
      for (Element elem : elements) {
        if (!(elem instanceof TypeElement)) continue;
        TypeElement typeElem = (TypeElement) elem;
        String typeName = elementUtils.getBinaryName(typeElem).toString();
        typeNames.add(typeName);
        writer.println(typeName);
      }
    }
    closer.close();
    writeResourceMapping(typeNames, outputFile);
    log(Diagnostic.Kind.NOTE, "Generated resource '%s'", outputFile.toUri());
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
  return true;
}
From source file:org.restexpress.plugin.content.adapter.AbstractContextAdapter.java
/**
 * Retrieve the specified resource associated with the given remote path.
 *
 * @param context current context
 * @param name resource name
 * @param remotePath remote path
 * @return a {@link File} resource, or null if the remote path does not exist.
 * @throws IOException
 */
protected File retrieveFile(final T context, final String name, final String remotePath) throws IOException {
  // test if the remote path exists
  if (exists(context, remotePath)) {
    File target = getLocalFile(name);
    // build local file system
    final File parent = target.getParentFile();
    if (parent != null)
      parent.mkdirs();
    // copy file
    final Closer closer = Closer.create();
    try {
      // get a copy.
      final InputStream in = closer.register(get(context, remotePath));
      final OutputStream out = closer.register(new FileOutputStream(target));
      ByteStreams.copy(in, out);
      out.flush();
    } catch (final Throwable e) {
      if (target != null)
        target.delete();
      target = null;
    } finally {
      closer.close();
    }
    return target;
  }
  return null;
}
From source file:io.druid.segment.IndexMergerV9.java
@Override
protected File makeIndexFiles(final List<IndexableAdapter> adapters, final AggregatorFactory[] metricAggs,
    final File outDir, final ProgressIndicator progress, final List<String> mergedDimensions,
    final List<String> mergedMetrics,
    final Function<ArrayList<Iterable<Rowboat>>, Iterable<Rowboat>> rowMergerFn,
    final IndexSpec indexSpec) throws IOException {
  progress.start();
  progress.progress();

  List<Metadata> metadataList = Lists.transform(adapters, new Function<IndexableAdapter, Metadata>() {
    @Override
    public Metadata apply(IndexableAdapter input) {
      return input.getMetadata();
    }
  });

  Metadata segmentMetadata = null;
  if (metricAggs != null) {
    AggregatorFactory[] combiningMetricAggs = new AggregatorFactory[metricAggs.length];
    for (int i = 0; i < metricAggs.length; i++) {
      combiningMetricAggs[i] = metricAggs[i].getCombiningFactory();
    }
    segmentMetadata = Metadata.merge(metadataList, combiningMetricAggs);
  } else {
    segmentMetadata = Metadata.merge(metadataList, null);
  }

  Closer closer = Closer.create();
  final IOPeon ioPeon = new TmpFileIOPeon(false);
  closer.register(new Closeable() {
    @Override
    public void close() throws IOException {
      ioPeon.cleanup();
    }
  });

  final FileSmoosher v9Smoosher = new FileSmoosher(outDir);
  final File v9TmpDir = new File(outDir, "v9-tmp");
  v9TmpDir.mkdirs();
  closer.register(new Closeable() {
    @Override
    public void close() throws IOException {
      FileUtils.deleteDirectory(v9TmpDir);
    }
  });

  log.info("Start making v9 index files, outDir:%s", outDir);
  try {
    long startTime = System.currentTimeMillis();
    ByteStreams.write(Ints.toByteArray(IndexIO.V9_VERSION),
        Files.newOutputStreamSupplier(new File(outDir, "version.bin")));
    log.info("Completed version.bin in %,d millis.", System.currentTimeMillis() - startTime);

    progress.progress();
    final Map<String, ValueType> metricsValueTypes = Maps.newTreeMap(Ordering.<String>natural().nullsFirst());
    final Map<String, String> metricTypeNames = Maps.newTreeMap(Ordering.<String>natural().nullsFirst());
    final List<ColumnCapabilitiesImpl> dimCapabilities = Lists.newArrayListWithCapacity(mergedDimensions.size());
    mergeCapabilities(adapters, mergedDimensions, metricsValueTypes, metricTypeNames, dimCapabilities);

    final DimensionHandler[] handlers = makeDimensionHandlers(mergedDimensions, dimCapabilities);
    final List<DimensionMerger> mergers = new ArrayList<>();
    for (int i = 0; i < mergedDimensions.size(); i++) {
      mergers.add(handlers[i].makeMerger(indexSpec, v9TmpDir, ioPeon, dimCapabilities.get(i), progress));
    }

    /************* Setup Dim Conversions **************/
    progress.progress();
    startTime = System.currentTimeMillis();
    final ArrayList<Map<String, IntBuffer>> dimConversions = Lists.newArrayListWithCapacity(adapters.size());
    final ArrayList<Boolean> dimensionSkipFlag = Lists.newArrayListWithCapacity(mergedDimensions.size());
    final ArrayList<Boolean> convertMissingDimsFlags = Lists.newArrayListWithCapacity(mergedDimensions.size());
    writeDimValueAndSetupDimConversion(adapters, progress, mergedDimensions, mergers);
    log.info("Completed dim conversions in %,d millis.", System.currentTimeMillis() - startTime);

    /************* Walk through data sets, merge them, and write merged columns *************/
    progress.progress();
    final Iterable<Rowboat> theRows = makeRowIterable(adapters, mergedDimensions, mergedMetrics, rowMergerFn,
        dimCapabilities, handlers, mergers);
    final LongColumnSerializer timeWriter = setupTimeWriter(ioPeon, indexSpec);
    final ArrayList<GenericColumnSerializer> metWriters = setupMetricsWriters(ioPeon, mergedMetrics,
        metricsValueTypes, metricTypeNames, indexSpec);
    final List<IntBuffer> rowNumConversions = Lists.newArrayListWithCapacity(adapters.size());
    mergeIndexesAndWriteColumns(adapters, progress, theRows, timeWriter, metWriters, rowNumConversions, mergers);

    /************ Create Inverted Indexes and Finalize Build Columns *************/
    final String section = "build inverted index and columns";
    progress.startSection(section);
    makeTimeColumn(v9Smoosher, progress, timeWriter);
    makeMetricsColumns(v9Smoosher, progress, mergedMetrics, metricsValueTypes, metricTypeNames, metWriters);
    for (int i = 0; i < mergedDimensions.size(); i++) {
      DimensionMergerV9 merger = (DimensionMergerV9) mergers.get(i);
      merger.writeIndexes(rowNumConversions, closer);
      if (merger.canSkip()) {
        continue;
      }
      ColumnDescriptor columnDesc = merger.makeColumnDescriptor();
      makeColumn(v9Smoosher, mergedDimensions.get(i), columnDesc);
    }
    progress.stopSection(section);

    /************* Make index.drd & metadata.drd files **************/
    progress.progress();
    makeIndexBinary(v9Smoosher, adapters, outDir, mergedDimensions, mergedMetrics, progress, indexSpec, mergers);
    makeMetadataBinary(v9Smoosher, progress, segmentMetadata);

    v9Smoosher.close();
    progress.stop();

    return outDir;
  } catch (Throwable t) {
    throw closer.rethrow(t);
  } finally {
    closer.close();
  }
}
From source file:gobblin.publisher.BaseDataPublisher.java
public BaseDataPublisher(State state) throws IOException {
  super(state);
  this.closer = Closer.create();
  Configuration conf = new Configuration();

  // Add all job configuration properties so they are picked up by Hadoop
  for (String key : this.getState().getPropertyNames()) {
    conf.set(key, this.getState().getProp(key));
  }

  this.numBranches = this.getState().getPropAsInt(ConfigurationKeys.FORK_BRANCHES_KEY, 1);

  this.writerFileSystemByBranches = Lists.newArrayListWithCapacity(this.numBranches);
  this.publisherFileSystemByBranches = Lists.newArrayListWithCapacity(this.numBranches);
  this.metaDataWriterFileSystemByBranches = Lists.newArrayListWithCapacity(this.numBranches);
  this.publisherFinalDirOwnerGroupsByBranches = Lists.newArrayListWithCapacity(this.numBranches);
  this.permissions = Lists.newArrayListWithCapacity(this.numBranches);
  this.metadataMergers = mergersForEachBranch();

  // Get a FileSystem instance for each branch
  for (int i = 0; i < this.numBranches; i++) {
    URI writerUri = URI.create(this.getState().getProp(
        ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.WRITER_FILE_SYSTEM_URI,
            this.numBranches, i),
        ConfigurationKeys.LOCAL_FS_URI));
    this.writerFileSystemByBranches.add(FileSystem.get(writerUri, conf));

    URI publisherUri = URI.create(this.getState().getProp(
        ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.DATA_PUBLISHER_FILE_SYSTEM_URI,
            this.numBranches, i),
        writerUri.toString()));
    this.publisherFileSystemByBranches.add(FileSystem.get(publisherUri, conf));
    this.metaDataWriterFileSystemByBranches.add(FileSystem.get(publisherUri, conf));

    // The group(s) will be applied to the final publisher output directory(ies)
    this.publisherFinalDirOwnerGroupsByBranches.add(Optional.fromNullable(this.getState().getProp(
        ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.DATA_PUBLISHER_FINAL_DIR_GROUP,
            this.numBranches, i))));

    // The permission(s) will be applied to all directories created by the publisher,
    // which do NOT include directories created by the writer and moved by the publisher.
    // The permissions of those directories are controlled by writer.file.permissions and writer.dir.permissions.
    this.permissions.add(new FsPermission(state.getPropAsShortWithRadix(
        ForkOperatorUtils.getPropertyNameForBranch(ConfigurationKeys.DATA_PUBLISHER_PERMISSIONS,
            this.numBranches, i),
        FsPermission.getDefault().toShort(), ConfigurationKeys.PERMISSION_PARSING_RADIX)));
  }

  this.parallelRunnerThreads = state.getPropAsInt(ParallelRunner.PARALLEL_RUNNER_THREADS_KEY,
      ParallelRunner.DEFAULT_PARALLEL_RUNNER_THREADS);
  this.parallelRunnerCloser = Closer.create();
}