Example usage for com.google.common.io Closer create

Introduction

On this page you can find example usage for com.google.common.io.Closer.create().

Prototype

public static Closer create() 

Document

Creates a new Closer.
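
The snippet below is a minimal sketch of the idiomatic pattern described in Guava's ClosingResourcesExplained wiki: register each resource as it is opened, rethrow from a catch of Throwable, and close in the finally block. The ReadFileExample class, its readFileToString method, and the path parameter are illustrative assumptions, not taken from any project on this page.

import com.google.common.base.Charsets;
import com.google.common.io.CharStreams;
import com.google.common.io.Closer;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

public class ReadFileExample {
    // Reads a text file into a String using the canonical Closer idiom.
    public static String readFileToString(String path) throws IOException {
        Closer closer = Closer.create();
        try {
            InputStream in = closer.register(new FileInputStream(path));
            InputStreamReader reader = closer.register(new InputStreamReader(in, Charsets.UTF_8));
            return CharStreams.toString(reader);
        } catch (Throwable t) {
            // rethrow() records t as the primary exception so that close() can
            // suppress (or log) any secondary exceptions thrown while closing.
            throw closer.rethrow(t);
        } finally {
            closer.close();
        }
    }
}

Note that the catch clause must be on Throwable, not just IOException, so that closer.rethrow() can record every failure as the primary exception; this is the same pattern used in the ResourceProvider and ErrorWarningFeedSource examples below.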

Usage

From source file:tachyon.shell.command.CopyFromLocalCommand.java

private void copyPath(File src, TachyonURI dstPath) throws IOException {
    try {
        if (!src.isDirectory()) {
            // If the dstPath is a directory, then it should be updated to be the path of the file where
            // src will be copied to
            if (mTfs.exists(dstPath) && mTfs.getStatus(dstPath).isFolder()) {
                dstPath = dstPath.join(src.getName());
            }

            Closer closer = Closer.create();
            FileOutStream os = null;
            try {
                os = closer.register(mTfs.createFile(dstPath));
                FileInputStream in = closer.register(new FileInputStream(src));
                FileChannel channel = closer.register(in.getChannel());
                ByteBuffer buf = ByteBuffer.allocate(8 * Constants.MB);
                while (channel.read(buf) != -1) {
                    buf.flip();
                    os.write(buf.array(), 0, buf.limit());
                }
            } catch (IOException e) {
                // Close the out stream and delete the file, so we don't have an incomplete file lying
                // around
                if (os != null) {
                    os.cancel();
                    if (mTfs.exists(dstPath)) {
                        mTfs.delete(dstPath);
                    }
                }
                throw e;
            } finally {
                closer.close();
            }
        } else {
            mTfs.createDirectory(dstPath);
            List<String> errorMessages = Lists.newArrayList();
            String[] fileList = src.list();
            for (String file : fileList) {
                TachyonURI newPath = new TachyonURI(dstPath, new TachyonURI(file));
                File srcFile = new File(src, file);
                try {
                    copyPath(srcFile, newPath);
                } catch (IOException e) {
                    errorMessages.add(e.getMessage());
                }
            }
            if (errorMessages.size() != 0) {
                if (errorMessages.size() == fileList.length) {
                    // If no files were created, then delete the directory
                    if (mTfs.exists(dstPath)) {
                        mTfs.delete(dstPath);
                    }
                }
                throw new IOException(Joiner.on('\n').join(errorMessages));
            }
        }
    } catch (TachyonException e) {
        throw new IOException(e.getMessage());
    }
}

From source file:gobblin.filesystem.MetricsFileSystemInstrumentation.java

public MetricsFileSystemInstrumentation(FileSystem underlying) {
    super(underlying);
    this.closer = Closer.create();
    this.metricContext = new MetricContext.Builder(underlying.getUri() + "_metrics").build();
    this.metricContext = this.closer.register(metricContext);

    this.listStatusTimer = this.metricContext.timer("listStatus");
    this.listFilesTimer = this.metricContext.timer("listFiles");
    this.globStatusTimer = this.metricContext.timer("globStatus");
    this.mkdirTimer = this.metricContext.timer("mkdirs");
    this.renameTimer = this.metricContext.timer("rename");
    this.deleteTimer = this.metricContext.timer("delete");
    this.createTimer = this.metricContext.timer("create");
    this.openTimer = this.metricContext.timer("open");
    this.setOwnerTimer = this.metricContext.timer("setOwner");
    this.getFileStatusTimer = this.metricContext.timer("getFileStatus");
    this.setPermissionTimer = this.metricContext.timer("setPermission");
    this.setTimesTimer = this.metricContext.timer("setTimes");
    this.appendTimer = this.metricContext.timer("append");
    this.concatTimer = this.metricContext.timer("concat");

    this.allTimers = ImmutableList.<ContextAwareTimer>builder()
            .add(this.listStatusTimer, this.listFilesTimer, this.globStatusTimer, this.mkdirTimer,
                    this.renameTimer, this.deleteTimer, this.createTimer, this.openTimer, this.setOwnerTimer,
                    this.getFileStatusTimer, this.setPermissionTimer, this.setTimesTimer, this.appendTimer,
                    this.concatTimer)
            .build();
}

From source file:com.b2international.snowowl.snomed.importer.rf2.validation.AbstractSnomedValidator.java

/**
 * Performs any one-time initialization necessary for the validation.
 *
 * @param monitor the SubMonitor instance to report progress on
 * @return the seen effective times
 */
protected Collection<String> preValidate(final SubMonitor monitor) {
    monitor.beginTask(MessageFormat.format("Preparing {0}s validation", importType.getDisplayName()), 1);

    final Map<String, CsvListWriter> writers = newHashMap();

    final Closer closer = Closer.create();
    try {
        final InputStreamReader releaseFileReader = closer
                .register(new InputStreamReader(releaseUrl.openStream(), CsvConstants.IHTSDO_CHARSET));
        final CsvListReader releaseFileListReader = closer
                .register(new CsvListReader(releaseFileReader, CsvConstants.IHTSDO_CSV_PREFERENCE));

        componentStagingDirectory = createStagingDirectory();

        final String[] header = releaseFileListReader.getCSVHeader(true);

        if (!StringUtils.equalsIgnoreCase(header, expectedHeader)) {
            addDefect(DefectType.HEADER_DIFFERENCES, String.format("Invalid header in '%s'", releaseFileName));
        }

        while (true) {
            final List<String> row = releaseFileListReader.read();

            if (null == row) {
                break;
            }

            final String effectiveTimeKey = getEffectiveTimeKey(row.get(1));

            if (!effectiveTimes.contains(effectiveTimeKey)) {
                effectiveTimes.add(effectiveTimeKey);

                // Use the original effective time field instead of the key
                validateEffectiveTime(row.get(1), releaseFileListReader.getLineNumber());

                final Path effectiveTimeFile = getEffectiveTimeFile(effectiveTimeKey);
                final BufferedWriter bw = closer.register(
                        Files.newBufferedWriter(effectiveTimeFile, Charsets.UTF_8, StandardOpenOption.CREATE));
                final CsvListWriter lw = closer
                        .register(new CsvListWriter(bw, CsvConstants.IHTSDO_CSV_PREFERENCE));
                writers.put(effectiveTimeKey, lw);
            }

            writers.get(effectiveTimeKey).write(row);
        }

        return ImmutableList.copyOf(effectiveTimes);
    } catch (final IOException e) {
        throw new ImportException(
                MessageFormat.format("Couldn''t read row from {0} release file.", releaseFileName), e);
    } finally {
        try {
            Closeables.close(closer, true);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        monitor.worked(1);
    }
}

From source file:org.eclipse.emf.eson.tests.util.ResourceProvider.java

public String loadAsStringFromURI(URI uri) throws IOException {
    URIConverter uriConverter = rs.getURIConverter();
    Closer closer = Closer.create(); // https://code.google.com/p/guava-libraries/wiki/ClosingResourcesExplained
    try {
        InputStream is = closer.register(uriConverter.createInputStream(uri));
        String content = CharStreams.toString(new InputStreamReader(is, Charsets.UTF_8));
        return content;
    } catch (Throwable e) { // must catch Throwable
        throw closer.rethrow(e);
    } finally {
        closer.close();
    }
}

From source file:alluxio.job.persist.PersistDefinition.java

@Override
public SerializableVoid runTask(PersistConfig config, SerializableVoid args, JobWorkerContext context)
        throws Exception {
    AlluxioURI uri = new AlluxioURI(config.getFilePath());
    String ufsPath = config.getUfsPath();

    // check if the file is persisted in UFS and delete it, if we are overwriting it
    UfsManager.UfsClient ufsClient = context.getUfsManager().get(config.getMountId());
    try (CloseableResource<UnderFileSystem> ufsResource = ufsClient.acquireUfsResource()) {
        UnderFileSystem ufs = ufsResource.get();
        if (ufs == null) {
            throw new IOException("Failed to create UFS instance for " + ufsPath);
        }
        if (ufs.exists(ufsPath)) {
            if (config.isOverwrite()) {
                LOG.info("File {} is already persisted in UFS. Removing it.", config.getFilePath());
                ufs.deleteFile(ufsPath);
            } else {
                throw new IOException("File " + config.getFilePath()
                        + " is already persisted in UFS, to overwrite the file, please set the overwrite flag"
                        + " in the config.");
            }
        }

        FileSystem fs = FileSystem.Factory.get();
        long bytesWritten;
        try (Closer closer = Closer.create()) {
            OpenFileOptions options = OpenFileOptions.defaults().setReadType(ReadType.NO_CACHE);
            FileInStream in = closer.register(fs.openFile(uri, options));
            AlluxioURI dstPath = new AlluxioURI(ufsPath);
            // Create ancestor directories from top to the bottom. We cannot use recursive create
            // parents here because the permission for the ancestors can be different.
            Stack<Pair<String, MkdirsOptions>> ufsDirsToMakeWithOptions = new Stack<>();
            AlluxioURI curAlluxioPath = uri.getParent();
            AlluxioURI curUfsPath = dstPath.getParent();
            // Stop at the Alluxio root because the mapped directory of Alluxio root in UFS may not
            // exist.
            while (!ufs.isDirectory(curUfsPath.toString()) && curAlluxioPath != null) {
                URIStatus curDirStatus = fs.getStatus(curAlluxioPath);
                ufsDirsToMakeWithOptions.push(new Pair<>(curUfsPath.toString(),
                        MkdirsOptions.defaults().setCreateParent(false).setOwner(curDirStatus.getOwner())
                                .setGroup(curDirStatus.getGroup())
                                .setMode(new Mode((short) curDirStatus.getMode()))));
                curAlluxioPath = curAlluxioPath.getParent();
                curUfsPath = curUfsPath.getParent();
            }
            while (!ufsDirsToMakeWithOptions.empty()) {
                Pair<String, MkdirsOptions> ufsDirAndPerm = ufsDirsToMakeWithOptions.pop();
                // UFS mkdirs might fail if the directory is already created. If so, skip the mkdirs
                // and assume the directory is already prepared, regardless of permission matching.
                if (!ufs.mkdirs(ufsDirAndPerm.getFirst(), ufsDirAndPerm.getSecond())
                        && !ufs.isDirectory(ufsDirAndPerm.getFirst())) {
                    throw new IOException("Failed to create " + ufsDirAndPerm.getFirst() + " with permission "
                            + ufsDirAndPerm.getSecond().toString());
                }
            }
            URIStatus uriStatus = fs.getStatus(uri);
            OutputStream out = closer.register(
                    ufs.create(dstPath.toString(), CreateOptions.defaults().setOwner(uriStatus.getOwner())
                            .setGroup(uriStatus.getGroup()).setMode(new Mode((short) uriStatus.getMode()))));
            bytesWritten = IOUtils.copyLarge(in, out);
            incrementPersistedMetric(ufsClient.getUfsMountPointUri(), bytesWritten);
        }
        LOG.info("Persisted file {} with size {}", ufsPath, bytesWritten);
    }
    return null;
}

From source file:org.grouplens.lenskit.eval.traintest.TrainTestEvalJob.java

@SuppressWarnings("PMD.AvoidCatchingThrowable")
private void runEvaluation() throws IOException, RecommenderBuildException {
    Closer closer = Closer.create();
    try {
        TableWriter userTable = userOutputSupplier.get();
        if (userTable != null) {
            closer.register(userTable);
        }
        TableWriter predictTable = predictOutputSupplier.get();
        if (predictTable != null) {
            closer.register(predictTable);
        }

        List<Object> outputRow = Lists.newArrayList();

        ExecutionInfo execInfo = buildExecInfo();

        logger.info("Building {}", algorithm.getName());
        StopWatch buildTimer = new StopWatch();
        buildTimer.start();
        RecommenderInstance rec = algorithm.makeTestableRecommender(data, snapshot, execInfo);
        buildTimer.stop();
        logger.info("Built {} in {}", algorithm.getName(), buildTimer);

        logger.info("Measuring {}", algorithm.getName());
        for (ModelMetric metric : modelMetrics) {
            outputRow.addAll(metric.measureAlgorithm(algorithm, data, rec.getRecommender()));
        }

        logger.info("Testing {}", algorithm.getName());
        StopWatch testTimer = new StopWatch();
        testTimer.start();
        List<TestUserMetricAccumulator> evalAccums = new ArrayList<TestUserMetricAccumulator>(
                evaluators.size());

        List<Object> userRow = new ArrayList<Object>();

        UserEventDAO testUsers = data.getTestData().getUserEventDAO();
        for (TestUserMetric eval : evaluators) {
            TestUserMetricAccumulator accum = eval.makeAccumulator(algorithm, data);
            evalAccums.add(accum);
        }

        Cursor<UserHistory<Event>> userProfiles = closer.register(testUsers.streamEventsByUser());
        for (UserHistory<Event> p : userProfiles) {
            assert userRow.isEmpty();
            userRow.add(p.getUserId());

            long uid = p.getUserId();
            LongSet testItems = p.itemSet();

            Supplier<SparseVector> preds = new PredictionSupplier(rec, uid, testItems);
            Supplier<List<ScoredId>> recs = new RecommendationSupplier(rec, uid, testItems);
            Supplier<UserHistory<Event>> hist = new HistorySupplier(rec.getUserEventDAO(), uid);
            Supplier<UserHistory<Event>> testHist = Suppliers.ofInstance(p);

            TestUser test = new TestUser(uid, hist, testHist, preds, recs);

            for (TestUserMetricAccumulator accum : evalAccums) {
                Object[] ures = accum.evaluate(test);
                if (ures != null) {
                    userRow.addAll(Arrays.asList(ures));
                }
            }
            if (userTable != null) {
                try {
                    userTable.writeRow(userRow);
                } catch (IOException e) {
                    throw new RuntimeException("error writing user row", e);
                }
            }
            userRow.clear();

            if (predictTable != null) {
                writePredictions(predictTable, uid, RatingVectorUserHistorySummarizer.makeRatingVector(p),
                        test.getPredictions());
            }
        }
        testTimer.stop();
        logger.info("Tested {} in {}", algorithm.getName(), testTimer);

        writeOutput(buildTimer, testTimer, outputRow, evalAccums);
    } catch (Throwable th) {
        throw closer.rethrow(th, RecommenderBuildException.class);
    } finally {
        closer.close();
    }
}

From source file:org.sonatype.nexus.rest.feeds.sources.ErrorWarningFeedSource.java

/**
 * Extracts ERROR and WARN log lines from the given log file. It returns an ordered list (newest first, oldest
 * last) of found log lines, and that list is limited to at most {@code entriesToExtract} entries.
 *
 * @param logFile          the log file to scan.
 * @param entriesToExtract the maximum number of newest entries to collect.
 */
protected List<SyndEntry> extractEntriesFromLogfile(final File logFile, final int entriesToExtract)
        throws IOException {
    final List<SyndEntry> entries = Lists.newArrayList();
    Closer closer = Closer.create();
    try {
        final BufferedReader reader = Files.newReader(logFile, Charset.forName("UTF-8"));
        String logLine = reader.readLine();
        while (logLine != null) {
            if (logLine.contains(" WARN ") || logLine.contains(" ERROR ")) {
                final SyndEntry entry = new SyndEntryImpl();
                entry.setPublishedDate(new Date()); // FIXME: item.getEventDate();
                entry.setAuthor(getNexusAuthor());
                entry.setLink("/");

                if (logLine.contains(" ERROR ")) {
                    entry.setTitle("Error");
                } else if (logLine.contains(" WARN ")) {
                    entry.setTitle("Warning");
                }

                final StringBuilder contentValue = new StringBuilder();
                contentValue.append(logLine);

                // FIXME: Grab following stacktrace if any in log
                // if ( StringUtils.isNotEmpty( item.getStackTrace() ) )
                // {
                // // we need <br/> and &nbsp; to display stack trace on RSS
                // String stackTrace = item.getStackTrace().replace(
                // (String) System.getProperties().get( "line.separator" ),
                // "<br/>" );
                // stackTrace = stackTrace.replace( "\t", "&nbsp;&nbsp;&nbsp;&nbsp;" );
                // contentValue.append( "<br/>" ).append( stackTrace );
                // }

                SyndContent content = new SyndContentImpl();
                content.setType(MediaType.TEXT_PLAIN.toString());
                content.setValue(contentValue.toString());
                entry.setDescription(content);

                entries.add(entry);
                if (entries.size() > entriesToExtract) {
                    entries.remove(0);
                }
            }
            logLine = reader.readLine();
        }
    } catch (Throwable e) {
        throw closer.rethrow(e);
    } finally {
        closer.close();
    }
    return Lists.reverse(entries);
}

From source file:com.android.build.gradle.internal.transforms.JarMerger.java

public void addJar(@NonNull File file, boolean removeEntryTimestamp) throws IOException {
    logger.verbose("addJar(%1$s)", file);
    init();

    try (Closer localCloser = Closer.create()) {
        FileInputStream fis = localCloser.register(new FileInputStream(file));
        ZipInputStream zis = localCloser.register(new ZipInputStream(fis));

        // loop on the entries of the jar file package and put them in the final jar
        ZipEntry entry;
        while ((entry = zis.getNextEntry()) != null) {
            // do not take directories or anything inside a potential META-INF folder.
            if (entry.isDirectory()) {
                continue;
            }

            String name = entry.getName();
            if (filter != null && !filter.checkEntry(name)) {
                continue;
            }

            JarEntry newEntry;

            // Preserve the STORED method of the input entry.
            if (entry.getMethod() == JarEntry.STORED) {
                newEntry = new JarEntry(entry);
            } else {
                // Create a new entry so that the compressed len is recomputed.
                newEntry = new JarEntry(name);
            }
            if (removeEntryTimestamp) {
                newEntry.setLastModifiedTime(ZERO_TIME);
                newEntry.setLastAccessTime(ZERO_TIME);
                newEntry.setCreationTime(ZERO_TIME);
            }

            // add the entry to the jar archive
            logger.verbose("addJar(%1$s): entry %2$s", file, name);
            jarOutputStream.putNextEntry(newEntry);

            // read the content of the entry from the input stream, and write it into the archive.
            int count;
            while ((count = zis.read(buffer)) != -1) {
                jarOutputStream.write(buffer, 0, count);
            }

            // close the entries for this file
            jarOutputStream.closeEntry();
            zis.closeEntry();
        }
    } catch (ZipAbortException e) {
        throw new IOException(e);
    }
}

From source file:gobblin.cluster.GobblinHelixTask.java

@Override
public TaskResult run() {
    SharedResourcesBroker<GobblinScopeTypes> globalBroker = null;
    try (Closer closer = Closer.create()) {
        closer.register(MDC.putCloseable(ConfigurationKeys.JOB_NAME_KEY, this.jobName));
        closer.register(MDC.putCloseable(ConfigurationKeys.JOB_KEY_KEY, this.jobKey));
        Path workUnitFilePath = new Path(
                this.taskConfig.getConfigMap().get(GobblinClusterConfigurationKeys.WORK_UNIT_FILE_PATH));

        String fileName = workUnitFilePath.getName();
        String storeName = workUnitFilePath.getParent().getName();
        WorkUnit workUnit;

        if (workUnitFilePath.getName().endsWith(AbstractJobLauncher.MULTI_WORK_UNIT_FILE_EXTENSION)) {
            workUnit = stateStores.mwuStateStore.getAll(storeName, fileName).get(0);
        } else {
            workUnit = stateStores.wuStateStore.getAll(storeName, fileName).get(0);
        }

        // The list of individual WorkUnits (flattened) to run
        List<WorkUnit> workUnits = Lists.newArrayList();

        if (workUnit instanceof MultiWorkUnit) {
            // Flatten the MultiWorkUnit so the job configuration properties can be added to each individual WorkUnits
            List<WorkUnit> flattenedWorkUnits = JobLauncherUtils
                    .flattenWorkUnits(((MultiWorkUnit) workUnit).getWorkUnits());
            workUnits.addAll(flattenedWorkUnits);
        } else {
            workUnits.add(workUnit);
        }

        globalBroker = SharedResourcesBrokerFactory.createDefaultTopLevelBroker(
                ConfigFactory.parseProperties(this.jobState.getProperties()),
                GobblinScopeTypes.GLOBAL.defaultScopeInstance());
        SharedResourcesBroker<GobblinScopeTypes> jobBroker = globalBroker
                .newSubscopedBuilder(new JobScopeInstance(this.jobState.getJobName(), this.jobState.getJobId()))
                .build();

        this.taskAttempt = new GobblinMultiTaskAttempt(workUnits.iterator(), this.jobId, this.jobState,
                this.taskStateTracker, this.taskExecutor, Optional.of(this.participantId),
                Optional.of(this.stateStores.taskStateStore), jobBroker);

        this.taskAttempt.runAndOptionallyCommitTaskAttempt(GobblinMultiTaskAttempt.CommitPolicy.IMMEDIATE);
        return new TaskResult(TaskResult.Status.COMPLETED,
                String.format("completed tasks: %d", workUnits.size()));
    } catch (InterruptedException ie) {
        Thread.currentThread().interrupt();
        return new TaskResult(TaskResult.Status.CANCELED, "");
    } catch (Throwable t) {
        LOGGER.error("GobblinHelixTask failed due to " + t.getMessage(), t);
        return new TaskResult(TaskResult.Status.ERROR, Throwables.getStackTraceAsString(t));
    } finally {
        if (globalBroker != null) {
            try {
                globalBroker.close();
            } catch (IOException ioe) {
                LOGGER.error("Could not close shared resources broker.", ioe);
            }
        }
    }
}

From source file:net.derquinse.bocas.jersey.client.BocasClient.java

private MemoryByteSource load(InputStream is) throws IOException {
    Closer closer = Closer.create();
    try {
        return loader.load(closer.register(is));
    } finally {
        closer.close();
    }
}