Example usage for com.google.common.io Closer.create()

Introduction

On this page you can find usage examples for com.google.common.io Closer.create(), collected from open-source projects.

Prototype

public static Closer create() 

Document

Creates a new Closer.
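
The typical pattern shown in the Closer Javadoc is: create a Closer, register each resource as it is opened, rethrow any failure through closer.rethrow(), and close the Closer in a finally block. Below is a minimal sketch of that pattern; the class name, method name, and file argument are illustrative and not taken from the examples that follow.

import com.google.common.io.Closer;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public class CloserExample {

    // Reads the first line of a file, closing every registered stream even if reading fails.
    static String readFirstLine(File file) throws IOException {
        Closer closer = Closer.create();
        try {
            FileInputStream in = closer.register(new FileInputStream(file));
            BufferedReader reader = closer.register(
                    new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8)));
            return reader.readLine();
        } catch (Throwable t) {
            // rethrow() records t as the primary exception; exceptions thrown later by
            // close() are suppressed in favor of it.
            throw closer.rethrow(t);
        } finally {
            closer.close();
        }
    }
}

Since Closer implements Closeable, it can also be used directly in a try-with-resources statement on Java 7 and later, as several of the examples below do.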

Usage

From source file:eu.interedition.text.repository.JdbcStore.java

JdbcStore writeSchema() {
    final Closer closer = Closer.create();
    try {
        restore(closer.register(
                new InputStreamReader(getClass().getResourceAsStream("schema.sql"), Charset.forName("UTF-8"))));
        return this;
    } finally {
        try {
            closer.close();
        } catch (IOException e) {
            throw Throwables.propagate(e);
        }
    }
}

From source file:com.taobao.android.builder.tools.jarmerge.JarMergerWithOverride.java

public void addJar(@NonNull File file, boolean removeEntryTimestamp) throws IOException {
    logger.verbose("addJar(%1$s)", file);
    init();

    Closer localCloser = Closer.create();
    try {
        FileInputStream fis = localCloser.register(new FileInputStream(file));
        ZipInputStream zis = localCloser.register(new ZipInputStream(fis));

        // loop on the entries of the jar file package and put them in the final jar
        ZipEntry entry;
        while ((entry = zis.getNextEntry()) != null) {
            // do not take directories or anything inside a potential META-INF folder.
            if (entry.isDirectory()) {
                continue;
            }

            String name = entry.getName();
            if (filter != null && !filter.checkEntry(entry.getName())) {
                continue;
            }
            JarEntry newEntry;

            // Preserve the STORED method of the input entry.
            if (entry.getMethod() == JarEntry.STORED) {
                newEntry = new JarEntry(entry);
            } else {
                // Create a new entry so that the compressed len is recomputed.
                newEntry = new JarEntry(name);
            }
            if (removeEntryTimestamp) {
                newEntry.setTime(0);
            }

            // add the entry to the jar archive
            logger.verbose("addJar(%1$s): entry %2$s", file, name);
            duplicates.put(name, file.getAbsolutePath());
            if (duplicates.get(name).size() > 1) {
                logger.info("[Duplicated]" + name + ":" + file.getAbsolutePath() + ":" + duplicates.get(name));
                continue;
            }

            jarOutputStream.putNextEntry(newEntry);

            // read the content of the entry from the input stream, and write it into the archive.
            int count;
            while ((count = zis.read(buffer)) != -1) {
                jarOutputStream.write(buffer, 0, count);
            }

            // close the entries for this file
            jarOutputStream.closeEntry();
            zis.closeEntry();
        }
    } catch (ZipAbortException e) {
        throw new IOException("check exception", e);
    } finally {
        localCloser.close();
    }
}

From source file:tachyon.shell.TfsShell.java

/**
 * Copies a file specified by srcPath from the Tachyon filesystem to the local filesystem. This is the
 * utility function.
 *
 * @param srcPath The source TachyonURI (has to be a file)
 * @param dstFile The destination file in the local filesystem
 * @throws IOException
 */
public void copyFileToLocal(TachyonURI srcPath, File dstFile) throws IOException {
    try {
        TachyonFile srcFd = mTfs.open(srcPath);
        File tmpDst = File.createTempFile("copyToLocal", null);
        tmpDst.deleteOnExit();

        Closer closer = Closer.create();
        try {
            InStreamOptions op = new InStreamOptions.Builder(mTachyonConf)
                    .setTachyonStorageType(TachyonStorageType.NO_STORE).build();
            FileInStream is = closer.register(mTfs.getInStream(srcFd, op));
            FileOutputStream out = closer.register(new FileOutputStream(tmpDst));
            byte[] buf = new byte[64 * Constants.MB];
            int t = is.read(buf);
            while (t != -1) {
                out.write(buf, 0, t);
                t = is.read(buf);
            }
            if (!tmpDst.renameTo(dstFile)) {
                throw new IOException(
                        "Failed to rename " + tmpDst.getPath() + " to destination " + dstFile.getPath());
            }
            System.out.println("Copied " + srcPath + " to " + dstFile.getPath());
        } finally {
            closer.close();
        }
    } catch (TachyonException e) {
        throw new IOException(e.getMessage());
    }
}

From source file:org.apache.gobblin.runtime.AbstractJobLauncher.java

@Override
public void launchJob(JobListener jobListener) throws JobException {
    String jobId = this.jobContext.getJobId();
    final JobState jobState = this.jobContext.getJobState();

    try {
        MDC.put(ConfigurationKeys.JOB_NAME_KEY, this.jobContext.getJobName());
        MDC.put(ConfigurationKeys.JOB_KEY_KEY, this.jobContext.getJobKey());
        TimingEvent launchJobTimer = this.eventSubmitter
                .getTimingEvent(TimingEvent.LauncherTimings.FULL_JOB_EXECUTION);

        try (Closer closer = Closer.create()) {
            closer.register(this.jobContext);
            notifyListeners(this.jobContext, jobListener, TimingEvent.LauncherTimings.JOB_PREPARE,
                    new JobListenerAction() {
                        @Override
                        public void apply(JobListener jobListener, JobContext jobContext) throws Exception {
                            jobListener.onJobPrepare(jobContext);
                        }
                    });

            if (this.jobContext.getSemantics() == DeliverySemantics.EXACTLY_ONCE) {

                // If exactly-once is used, commit sequences of the previous run must be successfully completed
                // before this run can make progress.
                executeUnfinishedCommitSequences(jobState.getJobName());
            }

            TimingEvent workUnitsCreationTimer = this.eventSubmitter
                    .getTimingEvent(TimingEvent.LauncherTimings.WORK_UNITS_CREATION);
            Source<?, ?> source = this.jobContext.getSource();
            WorkUnitStream workUnitStream;
            if (source instanceof WorkUnitStreamSource) {
                workUnitStream = ((WorkUnitStreamSource) source).getWorkunitStream(jobState);
            } else {
                workUnitStream = new BasicWorkUnitStream.Builder(source.getWorkunits(jobState)).build();
            }
            workUnitsCreationTimer.stop(
                    this.eventMetadataGenerator.getMetadata(this.jobContext, EventName.WORK_UNITS_CREATION));

            // The absence of work units means something went wrong while getting them
            if (workUnitStream == null || workUnitStream.getWorkUnits() == null) {
                this.eventSubmitter.submit(JobEvent.WORK_UNITS_MISSING);
                jobState.setState(JobState.RunningState.FAILED);
                throw new JobException("Failed to get work units for job " + jobId);
            }

            // No work units to run
            if (!workUnitStream.getWorkUnits().hasNext()) {
                this.eventSubmitter.submit(JobEvent.WORK_UNITS_EMPTY);
                LOG.warn("No work units have been created for job " + jobId);
                jobState.setState(JobState.RunningState.COMMITTED);
                notifyListeners(this.jobContext, jobListener, TimingEvent.LauncherTimings.JOB_COMPLETE,
                        new JobListenerAction() {
                            @Override
                            public void apply(JobListener jobListener, JobContext jobContext) throws Exception {
                                jobListener.onJobCompletion(jobContext);
                            }
                        });
                return;
            }

            // Initialize writer and converter(s)
            closer.register(WriterInitializerFactory.newInstace(jobState, workUnitStream)).initialize();
            closer.register(ConverterInitializerFactory.newInstance(jobState, workUnitStream)).initialize();

            TimingEvent stagingDataCleanTimer = this.eventSubmitter
                    .getTimingEvent(TimingEvent.RunJobTimings.MR_STAGING_DATA_CLEAN);
            // Cleanup left-over staging data possibly from the previous run. This is particularly
            // important if the current batch of WorkUnits includes failed WorkUnits from the previous
            // run which may still have left-over staging data not cleaned up yet.
            cleanLeftoverStagingData(workUnitStream, jobState);
            stagingDataCleanTimer.stop(
                    this.eventMetadataGenerator.getMetadata(this.jobContext, EventName.MR_STAGING_DATA_CLEAN));

            long startTime = System.currentTimeMillis();
            jobState.setStartTime(startTime);
            jobState.setState(JobState.RunningState.RUNNING);

            try {
                LOG.info("Starting job " + jobId);

                notifyListeners(this.jobContext, jobListener, TimingEvent.LauncherTimings.JOB_START,
                        new JobListenerAction() {
                            @Override
                            public void apply(JobListener jobListener, JobContext jobContext) throws Exception {
                                jobListener.onJobStart(jobContext);
                            }
                        });

                TimingEvent workUnitsPreparationTimer = this.eventSubmitter
                        .getTimingEvent(TimingEvent.LauncherTimings.WORK_UNITS_PREPARATION);
                // Add task ids
                workUnitStream = prepareWorkUnits(workUnitStream, jobState);
                // Remove skipped workUnits from the list of work units to execute.
                workUnitStream = workUnitStream.filter(new SkippedWorkUnitsFilter(jobState));
                // Add surviving tasks to jobState
                workUnitStream = workUnitStream.transform(new MultiWorkUnitForEach() {
                    @Override
                    public void forWorkUnit(WorkUnit workUnit) {
                        jobState.incrementTaskCount();
                        jobState.addTaskState(new TaskState(new WorkUnitState(workUnit, jobState)));
                    }
                });

                // dump the work unit if tracking logs are enabled
                if (jobState.getPropAsBoolean(ConfigurationKeys.WORK_UNIT_ENABLE_TRACKING_LOGS)) {
                    workUnitStream = workUnitStream.transform(new Function<WorkUnit, WorkUnit>() {
                        @Nullable
                        @Override
                        public WorkUnit apply(@Nullable WorkUnit input) {
                            LOG.info("Work unit tracking log: {}", input);
                            return input;
                        }
                    });
                }

                workUnitsPreparationTimer.stop(this.eventMetadataGenerator.getMetadata(this.jobContext,
                        EventName.WORK_UNITS_PREPARATION));

                // Write job execution info to the job history store before the job starts to run
                this.jobContext.storeJobExecutionInfo();

                TimingEvent jobRunTimer = this.eventSubmitter
                        .getTimingEvent(TimingEvent.LauncherTimings.JOB_RUN);
                // Start the job and wait for it to finish
                runWorkUnitStream(workUnitStream);
                jobRunTimer.stop(this.eventMetadataGenerator.getMetadata(this.jobContext, EventName.JOB_RUN));

                this.eventSubmitter.submit(
                        CaseFormat.UPPER_UNDERSCORE.to(CaseFormat.UPPER_CAMEL, "JOB_" + jobState.getState()));

                // Check and set the final job state upon job completion
                if (jobState.getState() == JobState.RunningState.CANCELLED) {
                    LOG.info(String.format("Job %s has been cancelled, aborting now", jobId));
                    return;
                }

                TimingEvent jobCommitTimer = this.eventSubmitter
                        .getTimingEvent(TimingEvent.LauncherTimings.JOB_COMMIT);
                this.jobContext.finalizeJobStateBeforeCommit();
                this.jobContext.commit();
                postProcessJobState(jobState);
                jobCommitTimer
                        .stop(this.eventMetadataGenerator.getMetadata(this.jobContext, EventName.JOB_COMMIT));
            } finally {
                long endTime = System.currentTimeMillis();
                jobState.setEndTime(endTime);
                jobState.setDuration(endTime - jobState.getStartTime());
            }
        } catch (Throwable t) {
            jobState.setState(JobState.RunningState.FAILED);
            String errMsg = "Failed to launch and run job " + jobId;
            LOG.error(errMsg + ": " + t, t);
        } finally {
            try {
                TimingEvent jobCleanupTimer = this.eventSubmitter
                        .getTimingEvent(TimingEvent.LauncherTimings.JOB_CLEANUP);
                cleanupStagingData(jobState);
                jobCleanupTimer
                        .stop(this.eventMetadataGenerator.getMetadata(this.jobContext, EventName.JOB_CLEANUP));

                // Write job execution info to the job history store upon job termination
                this.jobContext.storeJobExecutionInfo();
            } finally {
                launchJobTimer.stop(
                        this.eventMetadataGenerator.getMetadata(this.jobContext, EventName.FULL_JOB_EXECUTION));
            }
        }

        for (JobState.DatasetState datasetState : this.jobContext.getDatasetStatesByUrns().values()) {
            // Set the overall job state to FAILED if the job failed to process any dataset
            if (datasetState.getState() == JobState.RunningState.FAILED) {
                jobState.setState(JobState.RunningState.FAILED);
                LOG.warn("At least one dataset state is FAILED. Setting job state to FAILED.");
                break;
            }
        }

        notifyListeners(this.jobContext, jobListener, TimingEvent.LauncherTimings.JOB_COMPLETE,
                new JobListenerAction() {
                    @Override
                    public void apply(JobListener jobListener, JobContext jobContext) throws Exception {
                        jobListener.onJobCompletion(jobContext);
                    }
                });

        if (jobState.getState() == JobState.RunningState.FAILED) {
            notifyListeners(this.jobContext, jobListener, TimingEvent.LauncherTimings.JOB_FAILED,
                    new JobListenerAction() {
                        @Override
                        public void apply(JobListener jobListener, JobContext jobContext) throws Exception {
                            jobListener.onJobFailure(jobContext);
                        }
                    });
            throw new JobException(String.format("Job %s failed", jobId));
        }
    } finally {
        // Stop metrics reporting
        if (this.jobContext.getJobMetricsOptional().isPresent()) {
            JobMetrics.remove(jobState);
        }
        MDC.remove(ConfigurationKeys.JOB_NAME_KEY);
        MDC.remove(ConfigurationKeys.JOB_KEY_KEY);
    }
}

From source file:alluxio.cli.fs.command.CpCommand.java

/**
 * Copies a file or directory specified by srcPath from the local filesystem to dstPath in the
 * Alluxio filesystem space.
 *
 * @param srcPath the {@link AlluxioURI} of the source file in the local filesystem
 * @param dstPath the {@link AlluxioURI} of the destination
 */
private void copyPath(AlluxioURI srcPath, AlluxioURI dstPath) throws AlluxioException, IOException {
    File src = new File(srcPath.getPath());
    if (!src.isDirectory()) {
        // If the dstPath is a directory, then it should be updated to be the path of the file where
        // src will be copied to.
        if (mFileSystem.exists(dstPath) && mFileSystem.getStatus(dstPath).isFolder()) {
            dstPath = dstPath.join(src.getName());
        }

        FileOutStream os = null;
        try (Closer closer = Closer.create()) {
            FileWriteLocationPolicy locationPolicy;
            locationPolicy = CommonUtils.createNewClassInstance(
                    Configuration.<FileWriteLocationPolicy>getClass(
                            PropertyKey.USER_FILE_COPY_FROM_LOCAL_WRITE_LOCATION_POLICY),
                    new Class[] {}, new Object[] {});
            os = closer.register(mFileSystem.createFile(dstPath,
                    CreateFileOptions.defaults().setLocationPolicy(locationPolicy)));
            FileInputStream in = closer.register(new FileInputStream(src));
            FileChannel channel = closer.register(in.getChannel());
            ByteBuffer buf = ByteBuffer.allocate(8 * Constants.MB);
            while (channel.read(buf) != -1) {
                buf.flip();
                os.write(buf.array(), 0, buf.limit());
            }
        } catch (Exception e) {
            // Close the out stream and delete the file, so we don't have an incomplete file lying
            // around.
            if (os != null) {
                os.cancel();
                if (mFileSystem.exists(dstPath)) {
                    mFileSystem.delete(dstPath);
                }
            }
            throw e;
        }
    } else {
        mFileSystem.createDirectory(dstPath);
        List<String> errorMessages = new ArrayList<>();
        File[] fileList = src.listFiles();
        if (fileList == null) {
            String errMsg = String.format("Failed to list files for directory %s", src);
            errorMessages.add(errMsg);
            fileList = new File[0];
        }
        int misFiles = 0;
        for (File srcFile : fileList) {
            AlluxioURI newURI = new AlluxioURI(dstPath, new AlluxioURI(srcFile.getName()));
            try {
                copyPath(new AlluxioURI(srcPath.getScheme(), srcPath.getAuthority(), srcFile.getPath()),
                        newURI);
            } catch (IOException e) {
                errorMessages.add(e.getMessage());
                if (!mFileSystem.exists(newURI)) {
                    misFiles++;
                }
            }
        }
        if (errorMessages.size() != 0) {
            if (misFiles == fileList.length) {
                // If the directory doesn't exist and no files were created, then delete the directory
                if (mFileSystem.exists(dstPath)) {
                    mFileSystem.delete(dstPath);
                }
            }
            throw new IOException(Joiner.on('\n').join(errorMessages));
        }
    }
}

From source file:gobblin.compaction.mapreduce.MRCompactor.java

public MRCompactor(Properties props, List<? extends Tag<?>> tags, Optional<CompactorListener> compactorListener)
        throws IOException {
    this.state = new State();
    this.state.addAll(props);
    this.initilizeTime = getCurrentTime();
    this.tags = tags;
    this.conf = HadoopUtils.getConfFromState(this.state);
    this.tmpOutputDir = getTmpOutputDir();
    this.fs = getFileSystem();
    this.datasets = getDatasetsFinder().findDistinctDatasets();
    this.jobExecutor = createJobExecutor();
    this.jobRunnables = Maps.newConcurrentMap();
    this.closer = Closer.create();
    this.stopwatch = Stopwatch.createStarted();
    this.gobblinMetrics = initializeMetrics();
    this.eventSubmitter = new EventSubmitter.Builder(
            GobblinMetrics.get(this.state.getProp(ConfigurationKeys.JOB_NAME_KEY)).getMetricContext(),
            MRCompactor.COMPACTION_TRACKING_EVENTS_NAMESPACE).build();
    this.compactorListener = compactorListener;
    this.dataVerifTimeoutMinutes = getDataVerifTimeoutMinutes();
    this.compactionTimeoutMinutes = getCompactionTimeoutMinutes();
    this.shouldVerifDataCompl = shouldVerifyDataCompleteness();
    this.compactionCompleteListener = getCompactionCompleteListener();
    this.verifier = this.shouldVerifDataCompl
            ? Optional.of(this.closer.register(new DataCompletenessVerifier(this.state)))
            : Optional.<DataCompletenessVerifier>absent();
    this.shouldPublishDataIfCannotVerifyCompl = shouldPublishDataIfCannotVerifyCompl();
}

From source file:com.taobao.android.builder.tasks.app.databinding.AwbDataBindingMergeArtifactsTask.java

private void extractBinFilesFromJar(File outFolder, File jarFile) throws IOException {
    File jarOutFolder = getOutFolderForJarFile(outFolder, jarFile);
    FileUtils.deleteQuietly(jarOutFolder);
    FileUtils.forceMkdir(jarOutFolder);

    try (Closer localCloser = Closer.create()) {
        FileInputStream fis = localCloser.register(new FileInputStream(jarFile));
        ZipInputStream zis = localCloser.register(new ZipInputStream(fis));
        ZipEntry entry;
        while ((entry = zis.getNextEntry()) != null) {
            if (entry.isDirectory()) {
                continue;
            }

            String name = entry.getName();

            if (!isResource(name)) {
                continue;
            }
            // get rid of the path. We don't need it since the file name includes the domain
            name = new File(name).getName();
            File out = new File(jarOutFolder, name);
            //noinspection ResultOfMethodCallIgnored
            FileOutputStream fos = localCloser.register(new FileOutputStream(out));
            ByteStreams.copy(zis, fos);
            zis.closeEntry();
        }
    }
}

From source file:alluxio.shell.command.CpCommand.java

/**
 * Copies a file or directory specified by srcPath from the local filesystem to dstPath in the
 * Alluxio filesystem space.
 *
 * @param srcPath the {@link AlluxioURI} of the source file in the local filesystem
 * @param dstPath the {@link AlluxioURI} of the destination
 * @throws AlluxioException when an Alluxio exception occurs
 * @throws IOException when a non-Alluxio exception occurs
 */
private void copyPath(AlluxioURI srcPath, AlluxioURI dstPath) throws AlluxioException, IOException {
    File src = new File(srcPath.getPath());
    if (!src.isDirectory()) {
        // If the dstPath is a directory, then it should be updated to be the path of the file where
        // src will be copied to.
        if (mFileSystem.exists(dstPath) && mFileSystem.getStatus(dstPath).isFolder()) {
            dstPath = dstPath.join(src.getName());
        }

        FileOutStream os = null;
        try (Closer closer = Closer.create()) {
            os = closer.register(mFileSystem.createFile(dstPath));
            FileInputStream in = closer.register(new FileInputStream(src));
            FileChannel channel = closer.register(in.getChannel());
            ByteBuffer buf = ByteBuffer.allocate(8 * Constants.MB);
            while (channel.read(buf) != -1) {
                buf.flip();
                os.write(buf.array(), 0, buf.limit());
            }
        } catch (Exception e) {
            // Close the out stream and delete the file, so we don't have an incomplete file lying
            // around.
            if (os != null) {
                os.cancel();
                if (mFileSystem.exists(dstPath)) {
                    mFileSystem.delete(dstPath);
                }
            }
            throw e;
        }
    } else {
        mFileSystem.createDirectory(dstPath);
        List<String> errorMessages = new ArrayList<>();
        File[] fileList = src.listFiles();
        if (fileList == null) {
            String errMsg = String.format("Failed to list files for directory %s", src);
            errorMessages.add(errMsg);
            fileList = new File[0];
        }
        int misFiles = 0;
        for (File srcFile : fileList) {
            AlluxioURI newURI = new AlluxioURI(dstPath, new AlluxioURI(srcFile.getName()));
            try {
                copyPath(new AlluxioURI(srcPath.getScheme(), srcPath.getAuthority(), srcFile.getPath()),
                        newURI);
            } catch (IOException e) {
                errorMessages.add(e.getMessage());
                if (!mFileSystem.exists(newURI)) {
                    misFiles++;
                }
            }
        }
        if (errorMessages.size() != 0) {
            if (misFiles == fileList.length) {
                // If the directory doesn't exist and no files were created, then delete the directory
                if (mFileSystem.exists(dstPath)) {
                    mFileSystem.delete(dstPath);
                }
            }
            throw new IOException(Joiner.on('\n').join(errorMessages));
        }
    }
}

From source file:org.pantsbuild.tools.jar.Main.java

private Manifest loadManifest() throws IOException {
    Manifest mf = new Manifest();
    if (options.manifest != null) {
        Closer closer = Closer.create();
        try {
            FileInputStream input = closer.register(new FileInputStream(options.manifest));
            mf.read(input);
        } catch (IOException e) {
            throw closer.rethrow(new IOException("Failed to load manifest from " + options.manifest, e));
        } finally {
            closer.close();
        }
    }
    return JarBuilder.ensureDefaultManifestEntries(mf);
}

From source file:org.apache.gobblin.yarn.YarnService.java

private ByteBuffer getSecurityTokens() throws IOException {
    Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
    Closer closer = Closer.create();
    try {
        DataOutputBuffer dataOutputBuffer = closer.register(new DataOutputBuffer());
        credentials.writeTokenStorageToStream(dataOutputBuffer);

        // Remove the AM->RM token so that containers cannot access it
        Iterator<Token<?>> tokenIterator = credentials.getAllTokens().iterator();
        while (tokenIterator.hasNext()) {
            Token<?> token = tokenIterator.next();
            if (token.getKind().equals(AMRMTokenIdentifier.KIND_NAME)) {
                tokenIterator.remove();
            }
        }

        return ByteBuffer.wrap(dataOutputBuffer.getData(), 0, dataOutputBuffer.getLength());
    } catch (Throwable t) {
        throw closer.rethrow(t);
    } finally {
        closer.close();
    }
}