Example usage for com.google.common.io Closer register

Introduction

On this page you can find example usages for com.google.common.io Closer.register.

Prototype


public <C extends Closeable> C register(@Nullable C closeable) 

Document

Registers the given closeable to be closed when this Closer is closed.
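The method returns the closeable passed to it, so a resource can be constructed and registered in a single expression, as the examples below illustrate. A minimal self-contained sketch of the standard try/catch/finally pattern (a hypothetical file-copy helper; assumes Guava on the classpath):

import com.google.common.base.Charsets;
import com.google.common.io.Closer;
import com.google.common.io.Files;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;

public class CloserExample {

    /** Copies the first line of src into dst, closing both streams via Closer. */
    public static void copyFirstLine(File src, File dst) throws IOException {
        Closer closer = Closer.create();
        try {
            // register() returns its argument, so creation and registration combine
            BufferedReader reader = closer.register(Files.newReader(src, Charsets.UTF_8));
            BufferedWriter writer = closer.register(Files.newWriter(dst, Charsets.UTF_8));
            String line = reader.readLine();
            if (line != null) {
                writer.write(line);
            }
        } catch (Throwable t) {
            // records t as the primary exception so that close() can suppress
            // any secondary exception thrown while closing the streams
            throw closer.rethrow(t);
        } finally {
            // closes the registered closeables in reverse registration order
            closer.close();
        }
    }
}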

Usage

From source file:org.apache.gobblin.runtime.mapreduce.MRJobLauncher.java

/**
 * Prepare the job input.
 * @throws IOException
 */
private void prepareJobInput(List<WorkUnit> workUnits) throws IOException {
    Closer closer = Closer.create();
    try {
        ParallelRunner parallelRunner = closer
                .register(new ParallelRunner(this.parallelRunnerThreads, this.fs));

        int multiTaskIdSequence = 0;
        // Serialize each work unit into a file named after the task ID
        for (WorkUnit workUnit : workUnits) {

            String workUnitFileName;
            if (workUnit instanceof MultiWorkUnit) {
                workUnitFileName = JobLauncherUtils.newMultiTaskId(this.jobContext.getJobId(),
                        multiTaskIdSequence++) + MULTI_WORK_UNIT_FILE_EXTENSION;
            } else {
                workUnitFileName = workUnit.getProp(ConfigurationKeys.TASK_ID_KEY) + WORK_UNIT_FILE_EXTENSION;
            }
            Path workUnitFile = new Path(this.jobInputPath, workUnitFileName);
            LOG.debug("Writing work unit file " + workUnitFileName);

            parallelRunner.serializeToFile(workUnit, workUnitFile);

            // Append the work unit file path to the job input file
        }
    } catch (Throwable t) {
        throw closer.rethrow(t);
    } finally {
        closer.close();
    }
}

From source file:org.gbif.occurrence.download.oozie.ArchiveBuilder.java

/**
 * Adds an eml file per dataset involved into a subfolder "dataset" which is supported by our dwc archive reader.
 * Create a rights.txt and citation.txt file targeted at humans to quickly yield an overview about rights and
 * datasets involved.
 */
private void addConstituentMetadata() throws IOException {

    Path citationSrc = new Path(hdfsPath + Path.SEPARATOR + citationTable);

    LOG.info("Adding constituent dataset metadata to archive, based on: " + citationSrc);

    // now read the dataset citation table and create an EML file per datasetId
    // first copy from HDFS to local file
    if (!hdfs.exists(citationSrc)) {
        LOG.warn("No citation file directory existing on HDFS, skip creating of dataset metadata {}",
                citationSrc);
        return;
    }

    final Map<UUID, Integer> srcDatasets = readDatasetCounts(citationSrc);

    File emlDir = new File(archiveDir, "dataset");
    if (!srcDatasets.isEmpty()) {
        emlDir.mkdir();
    }
    Closer closer = Closer.create();

    Writer rightsWriter = closer.register(FileUtils.startNewUtf8File(new File(archiveDir, RIGHTS_FILENAME)));
    Writer citationWriter = closer
            .register(FileUtils.startNewUtf8File(new File(archiveDir, CITATIONS_FILENAME)));

    // write fixed citations header
    citationWriter.write(CITATION_HEADER);
    // now iterate over constituent UUIDs

    for (Entry<UUID, Integer> dsEntry : srcDatasets.entrySet()) {
        final UUID constituentId = dsEntry.getKey();
        LOG.info("Processing constituent dataset: {}", constituentId);
        // catch errors for each uuid to make sure one broken dataset does not bring down the entire process
        try {
            Dataset srcDataset = datasetService.get(constituentId);

            // citation
            String citationLink = writeCitation(citationWriter, srcDataset, constituentId);
            // rights
            writeRights(rightsWriter, srcDataset, citationLink);
            // eml file
            createEmlFile(constituentId, emlDir);

            // add as constituent for later
            constituents.add(new Constituent(srcDataset.getTitle(), dsEntry.getValue()));

            // add original author as content provider to main dataset description
            Contact provider = getContentProviderContact(srcDataset);
            if (provider != null) {
                dataset.getContacts().add(provider);
            }
        } catch (UniformInterfaceException e) {
            LOG.error(String.format("Registry client http exception: %d \n %s", e.getResponse().getStatus(),
                    e.getResponse().getEntity(String.class)), e);
        } catch (Exception e) {
            LOG.error("Error creating download file", e);
        }
    }
    closer.close();
}

From source file:org.grouplens.lenskit.eval.data.crossfold.CrossfoldTask.java

/**
 * Write train-test split files.
 *
 * @throws IOException if there is an error writing the files.
 */
@SuppressWarnings("PMD.AvoidCatchingThrowable")
protected void createTTFiles() throws IOException {
    File[] trainFiles = getFiles(getTrainPattern());
    File[] testFiles = getFiles(getTestPattern());
    RatingWriter[] trainWriters = new RatingWriter[partitionCount];
    RatingWriter[] testWriters = new RatingWriter[partitionCount];
    Closer closer = Closer.create();
    try {
        for (int i = 0; i < partitionCount; i++) {
            File train = trainFiles[i];
            File test = testFiles[i];
            trainWriters[i] = closer.register(makeWriter(train));
            testWriters[i] = closer.register(makeWriter(test));
        }
        switch (method) {
        case PARTITION_USERS:
        case SAMPLE_USERS:
            writeTTFilesByUsers(trainWriters, testWriters);
            break;
        case PARTITION_RATINGS:
            writeTTFilesByRatings(trainWriters, testWriters);
            break;
        }
    } catch (Throwable th) {
        throw closer.rethrow(th);
    } finally {
        closer.close();
    }
}

From source file:gobblin.yarn.GobblinYarnAppLauncher.java

private void setupSecurityTokens(ContainerLaunchContext containerLaunchContext) throws IOException {
    Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
    String tokenRenewer = this.yarnConfiguration.get(YarnConfiguration.RM_PRINCIPAL);
    if (tokenRenewer == null || tokenRenewer.length() == 0) {
        throw new IOException("Failed to get master Kerberos principal for the RM to use as renewer");
    }

    // For now, only getting tokens for the default file-system.
    Token<?>[] tokens = this.fs.addDelegationTokens(tokenRenewer, credentials);
    if (tokens != null) {
        for (Token<?> token : tokens) {
            LOGGER.info("Got delegation token for " + this.fs.getUri() + "; " + token);
        }
    }

    Closer closer = Closer.create();
    try {
        DataOutputBuffer dataOutputBuffer = closer.register(new DataOutputBuffer());
        credentials.writeTokenStorageToStream(dataOutputBuffer);
        ByteBuffer fsTokens = ByteBuffer.wrap(dataOutputBuffer.getData(), 0, dataOutputBuffer.getLength());
        containerLaunchContext.setTokens(fsTokens);
    } catch (Throwable t) {
        throw closer.rethrow(t);
    } finally {
        closer.close();
    }
}

From source file:org.gbif.occurrence.download.oozie.ArchiveBuilder.java

public void createEmlFile(final UUID constituentId, final File emlDir) throws IOException {
    Closer closer = Closer.create();
    try {
        // store dataset EML as constituent metadata
        InputStream in = closer.register(datasetService.getMetadataDocument(constituentId));
        if (in != null) {
            // copy into archive, reading stream from registry services
            OutputStream out = closer.register(new FileOutputStream(new File(emlDir, constituentId + ".xml")));
            ByteStreams.copy(in, out);
        } else {
            LOG.error("Found no EML for datasetId {}", constituentId);
        }

    } catch (IOException ex) {
        // FileNotFoundException is an IOException, so one catch block suffices
        LOG.error("Error creating eml file", ex);
    } finally {
        closer.close();
    }
}

From source file:com.spotify.apollo.core.ServiceImpl.java

ListeningExecutorService createExecutorService(Closer closer) {
    final ListeningExecutorService executorService = MoreExecutors.listeningDecorator(Executors
            .newCachedThreadPool(new ThreadFactoryBuilder().setNameFormat(serviceName + "-worker-%d").build()));
    closer.register(asCloseable(executorService));
    return executorService;
}
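
The asCloseable helper is not shown in this snippet; since ExecutorService does not implement Closeable, it has to be adapted before it can be registered. A hypothetical sketch of such an adapter (not the actual ServiceImpl implementation):

// Hypothetical adapter: lets a Closer shut the executor down on close().
private static Closeable asCloseable(final ExecutorService executorService) {
    return new Closeable() {
        @Override
        public void close() throws IOException {
            executorService.shutdown();
        }
    };
}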

From source file:gobblin.data.management.conversion.hive.validation.ValidationJob.java

/***
 * Execute Hive queries using {@link HiveJdbcConnector} and validate results.
 * @param queries Queries to execute.
 */
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE", justification = "Temporary fix")
private List<Long> getValidationOutputFromHive(List<String> queries) throws IOException {

    if (null == queries || queries.size() == 0) {
        log.warn("No queries specified to be executed");
        return Collections.emptyList();
    }

    List<Long> rowCounts = Lists.newArrayList();
    Closer closer = Closer.create();

    try {
        HiveJdbcConnector hiveJdbcConnector = closer.register(HiveJdbcConnector.newConnectorWithProps(props));
        for (String query : queries) {
            String hiveOutput = "hiveConversionValidationOutput_" + UUID.randomUUID().toString();
            Path hiveTempDir = new Path("/tmp" + Path.SEPARATOR + hiveOutput);
            query = "INSERT OVERWRITE DIRECTORY '" + hiveTempDir + "' " + query;
            log.info("Executing query: " + query);
            try {
                if (this.hiveSettings.size() > 0) {
                    hiveJdbcConnector
                            .executeStatements(this.hiveSettings.toArray(new String[this.hiveSettings.size()]));
                }
                hiveJdbcConnector.executeStatements("SET hive.exec.compress.output=false",
                        "SET hive.auto.convert.join=false", query);
                FileStatus[] fileStatusList = this.fs.listStatus(hiveTempDir);
                List<FileStatus> files = new ArrayList<>();
                for (FileStatus fileStatus : fileStatusList) {
                    if (fileStatus.isFile()) {
                        files.add(fileStatus);
                    }
                }
                if (files.size() > 1) {
                    log.warn("Found more than one output file. Should have been one.");
                } else if (files.size() == 0) {
                    log.warn("Found no output file. Should have been one.");
                } else {
                    String theString = IOUtils.toString(
                            new InputStreamReader(this.fs.open(files.get(0).getPath()), Charsets.UTF_8));
                    log.info("Found row count: " + theString.trim());
                    if (StringUtils.isBlank(theString.trim())) {
                        rowCounts.add(0L);
                    } else {
                        try {
                            rowCounts.add(Long.parseLong(theString.trim()));
                        } catch (NumberFormatException e) {
                            throw new RuntimeException("Could not parse Hive output: " + theString.trim(), e);
                        }
                    }
                }
            } finally {
                if (this.fs.exists(hiveTempDir)) {
                    log.debug("Deleting temp dir: " + hiveTempDir);
                    this.fs.delete(hiveTempDir, true);
                }
            }
        }
    } catch (SQLException e) {
        throw new RuntimeException(e);
    } finally {
        try {
            closer.close();
        } catch (Exception e) {
            log.warn("Could not close HiveJdbcConnector", e);
        }
    }

    return rowCounts;
}

From source file:org.apache.gobblin.data.management.conversion.hive.validation.ValidationJob.java

/***
 * Execute Hive queries using {@link HiveJdbcConnector} and validate results.
 * @param queries Queries to execute.
 */
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE", justification = "Temporary fix")
private List<Long> getValidationOutputFromHive(List<String> queries) throws IOException {

    if (null == queries || queries.size() == 0) {
        log.warn("No queries specified to be executed");
        return Collections.emptyList();
    }

    List<Long> rowCounts = Lists.newArrayList();
    Closer closer = Closer.create();

    try {
        HiveJdbcConnector hiveJdbcConnector = closer.register(HiveJdbcConnector.newConnectorWithProps(props));
        for (String query : queries) {
            String hiveOutput = "hiveConversionValidationOutput_" + UUID.randomUUID().toString();
            Path hiveTempDir = new Path("/tmp" + Path.SEPARATOR + hiveOutput);
            query = "INSERT OVERWRITE DIRECTORY '" + hiveTempDir + "' " + query;
            log.info("Executing query: " + query);
            try {
                if (this.hiveSettings.size() > 0) {
                    hiveJdbcConnector
                            .executeStatements(this.hiveSettings.toArray(new String[this.hiveSettings.size()]));
                }
                hiveJdbcConnector.executeStatements("SET hive.exec.compress.output=false",
                        "SET hive.auto.convert.join=false", query);
                FileStatus[] fileStatusList = this.fs.listStatus(hiveTempDir);
                List<FileStatus> files = new ArrayList<>();
                for (FileStatus fileStatus : fileStatusList) {
                    if (fileStatus.isFile()) {
                        files.add(fileStatus);
                    }
                }
                if (files.size() > 1) {
                    log.warn("Found more than one output file. Should have been one.");
                } else if (files.size() == 0) {
                    log.warn("Found no output file. Should have been one.");
                } else {
                    String theString = IOUtils.toString(
                            new InputStreamReader(this.fs.open(files.get(0).getPath()), Charsets.UTF_8));
                    log.info("Found row count: " + theString.trim());
                    if (StringUtils.isBlank(theString.trim())) {
                        rowCounts.add(0L);
                    } else {
                        try {
                            rowCounts.add(Long.parseLong(theString.trim()));
                        } catch (NumberFormatException e) {
                            throw new RuntimeException("Could not parse Hive output: " + theString.trim(), e);
                        }
                    }
                }
            } finally {
                if (this.fs.exists(hiveTempDir)) {
                    log.debug("Deleting temp dir: " + hiveTempDir);
                    this.fs.delete(hiveTempDir, true);
                }
            }
        }
    } catch (SQLException e) {
        log.warn("Execution failed for query set " + queries.toString(), e);
    } finally {
        try {
            closer.close();
        } catch (Exception e) {
            log.warn("Could not close HiveJdbcConnector", e);
        }
    }

    return rowCounts;
}

From source file:com.adobe.epubcheck.xml.XMLParser.java

public void process() {
    try {
        Closer closer = Closer.create();
        try {
            InputStream in = closer.register(context.resourceProvider.getInputStream(path));
            // System.err.println("DEBUG XMLParser#process on" + resource);
            if (!in.markSupported()) {
                in = new BufferedInputStream(in);
            }

            String encoding = sniffEncoding(in);
            if (encoding != null && !encoding.equals("UTF-8") && !encoding.equals("UTF-16")) {
                report.message(MessageId.CSS_003, EPUBLocation.create(path, ""), encoding);
            }

            InputSource ins = new InputSource(in);
            ins.setSystemId(zipRoot + path);
            parser.parse(ins, this);

        } catch (Throwable e) {
            // ensure that any checked exception types other than IOException that
            // could be thrown are
            // provided here, e.g. throw closer.rethrow(e,
            // CheckedException.class);
            // throw closer.rethrow(e);
            throw closer.rethrow(e, SAXException.class);
        } finally {
            closer.close();
        }
    } catch (FileNotFoundException e) {
        String message = e.getMessage();
        message = new File(message).getName();
        int p = message.indexOf("(");
        if (p > 0) {
            message = message.substring(0, p);
        }
        message = message.trim();
        report.message(MessageId.RSC_001, EPUBLocation.create(path), message);
    } catch (IOException e) {
        report.message(MessageId.PKG_008, EPUBLocation.create(path), path);
    } catch (IllegalArgumentException e) {
        report.message(MessageId.RSC_005, EPUBLocation.create(path), e.getMessage());
    } catch (SAXException e) {
        report.message(MessageId.RSC_005, EPUBLocation.create(path), e.getMessage());
    }
}

From source file:org.jclouds.vsphere.compute.config.VSphereComputeServiceAdapter.java

@Override
public VirtualMachine getNode(String vmName) {
    Closer closer = Closer.create();
    VSphereServiceInstance instance = serviceInstance.get();
    closer.register(instance);
    try {
        try {
            return getVM(vmName, instance.getInstance().getRootFolder());
        } catch (Throwable t) {
            throw closer.rethrow(t);
        } finally {
            closer.close();
        }
    } catch (IOException e) {
        Throwables.propagateIfPossible(e);
    }
    return null;
}