Example usage for com.google.common.io Closer create

Introduction

This page collects example usages of com.google.common.io.Closer.create() from real-world source files.

Prototype

public static Closer create() 

Document

Creates a new Closer.

Usage
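
Before the project-specific examples below, here is a minimal, self-contained sketch of the idiom Closer.create() supports: register each resource as it is opened, route any failure through rethrow so it becomes the primary exception, and close everything in the finally block. The file-copy scenario and the names CloserSketch, copy, from and to are illustrative only; they do not come from any of the source files listed below.

import com.google.common.io.Closer;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public class CloserSketch {

    // Copies a file using the canonical Closer pattern. Resources are registered
    // as they are opened; closer.close() closes them in reverse registration order.
    public static void copy(String from, String to) throws IOException {
        Closer closer = Closer.create();
        try {
            InputStream in = closer.register(new FileInputStream(from));
            OutputStream out = closer.register(new FileOutputStream(to));
            byte[] buffer = new byte[8192];
            int read;
            while ((read = in.read(buffer)) != -1) {
                out.write(buffer, 0, read);
            }
        } catch (Throwable t) {
            // rethrow records t as the primary exception; failures from close()
            // are then suppressed (or logged) rather than masking it.
            throw closer.rethrow(t);
        } finally {
            closer.close();
        }
    }

    public static void main(String[] args) throws IOException {
        copy(args[0], args[1]);
    }
}

Because Closer itself implements Closeable, it can also be used directly in a try-with-resources statement on Java 7 and later, as the KeepScanner example below does.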

From source file:org.apache.gobblin.metastore.util.DatabaseJobHistoryStoreSchemaManager.java

public static void main(String[] args) throws IOException {
    if (args.length < 1 || args.length > 2) {
        printUsage();
    }
    Closer closer = Closer.create();
    try {
        CompositeConfiguration config = new CompositeConfiguration();
        config.addConfiguration(new SystemConfiguration());
        if (args.length == 2) {
            config.addConfiguration(new PropertiesConfiguration(args[1]));
        }
        Properties properties = getProperties(config);
        DatabaseJobHistoryStoreSchemaManager schemaManager = closer
                .register(DatabaseJobHistoryStoreSchemaManager.builder(properties).build());
        if (String.CASE_INSENSITIVE_ORDER.compare("migrate", args[0]) == 0) {
            schemaManager.migrate();
        } else if (String.CASE_INSENSITIVE_ORDER.compare("info", args[0]) == 0) {
            schemaManager.info();
        } else {
            printUsage();
        }
    } catch (Throwable t) {
        throw closer.rethrow(t);
    } finally {
        closer.close();
    }
}

From source file:org.apache.jackrabbit.oak.plugins.tika.TextExtractorMain.java

public static void main(String[] args) throws Exception {
    Closer closer = Closer.create();
    String h = "tika [extract|report|generate]\n" + "\n"
            + "report   : Generates a summary report related to binary data\n"
            + "extract  : Performs the text extraction\n"
            + "generate : Generates the csv data file based on configured NodeStore/BlobStore";
    try {
        OptionParser parser = new OptionParser();
        OptionSpec<?> help = parser.acceptsAll(asList("h", "?", "help"), "show help").forHelp();

        OptionSpec<String> nodeStoreSpec = parser
                .accepts("nodestore", "NodeStore detail /path/to/oak/repository | mongodb://host:port/database")
                .withRequiredArg().ofType(String.class);

        OptionSpec segmentTar = parser.accepts("segment-tar", "Use oak-segment-tar instead of oak-segment");

        OptionSpec<String> pathSpec = parser
                .accepts("path", "Path in repository under which the binaries would be searched")
                .withRequiredArg().ofType(String.class);

        OptionSpec<File> dataFileSpec = parser
                .accepts("data-file", "Data file in csv format containing the binary metadata")
                .withRequiredArg().ofType(File.class);

        OptionSpec<File> tikaConfigSpec = parser.accepts("tika-config", "Tika config file path")
                .withRequiredArg().ofType(File.class);

        OptionSpec<File> fdsDirSpec = parser.accepts("fds-path", "Path of directory used by FileDataStore")
                .withRequiredArg().ofType(File.class);

        OptionSpec<File> s3ConfigSpec = parser
                .accepts("s3-config-path", "Path of properties file containing config for S3DataStore")
                .withRequiredArg().ofType(File.class);

        OptionSpec<File> storeDirSpec = parser
                .accepts("store-path", "Path of directory used to store extracted text content")
                .withRequiredArg().ofType(File.class);

        OptionSpec<Integer> poolSize = parser
                .accepts("pool-size", "Size of the thread pool used to perform text extraction. Defaults "
                        + "to number of cores on the system")
                .withRequiredArg().ofType(Integer.class);

        OptionSpec<String> nonOption = parser.nonOptions(h);

        OptionSet options = parser.parse(args);
        List<String> nonOptions = nonOption.values(options);

        if (options.has(help)) {
            parser.printHelpOn(System.out);
            System.exit(0);
        }

        if (nonOptions.isEmpty()) {
            parser.printHelpOn(System.err);
            System.exit(1);
        }

        boolean report = nonOptions.contains("report");
        boolean extract = nonOptions.contains("extract");
        boolean generate = nonOptions.contains("generate");
        File dataFile = null;
        File storeDir = null;
        File tikaConfigFile = null;
        BlobStore blobStore = null;
        BinaryResourceProvider binaryResourceProvider = null;
        BinaryStats stats = null;
        String path = "/";

        if (options.has(tikaConfigSpec)) {
            tikaConfigFile = tikaConfigSpec.value(options);
            checkArgument(tikaConfigFile.exists(), "Tika config file %s does not exist",
                    tikaConfigFile.getAbsolutePath());
        }

        if (options.has(storeDirSpec)) {
            storeDir = storeDirSpec.value(options);
            if (storeDir.exists()) {
                checkArgument(storeDir.isDirectory(),
                        "Path [%s] specified for storing extracted " + "text content '%s' is not a directory",
                        storeDir.getAbsolutePath(), storeDirSpec.options());
            }
        }

        if (options.has(fdsDirSpec)) {
            File fdsDir = fdsDirSpec.value(options);
            checkArgument(fdsDir.exists(), "FileDataStore %s does not exist", fdsDir.getAbsolutePath());
            FileDataStore fds = new FileDataStore();
            fds.setPath(fdsDir.getAbsolutePath());
            fds.init(null);
            blobStore = new DataStoreBlobStore(fds);
        }

        if (options.has(s3ConfigSpec)) {
            File s3Config = s3ConfigSpec.value(options);
            checkArgument(s3Config.exists() && s3Config.canRead(),
                    "S3DataStore config cannot be read from [%s]", s3Config.getAbsolutePath());
            Properties props = loadProperties(s3Config);
            log.info("Loaded properties for S3DataStore from {}", s3Config.getAbsolutePath());
            String pathProp = "path";
            String repoPath = props.getProperty(pathProp);
            checkNotNull(repoPath, "Missing required property [%s] from S3DataStore config loaded from [%s]",
                    pathProp, s3Config);

            //Check if 'secret' key is defined. It should be non null for references
            //to be generated. As the ref are transient we can just use any random value
            //if not specified
            String secretConfig = "secret";
            if (props.getProperty(secretConfig) == null) {
                props.setProperty(secretConfig, UUID.randomUUID().toString());
            }

            log.info("Using {} for S3DataStore ", repoPath);
            DataStore ds = createS3DataStore(props);
            PropertiesUtil.populate(ds, toMap(props), false);
            ds.init(pathProp);
            blobStore = new DataStoreBlobStore(ds);
            closer.register(asCloseable(ds));
        }

        if (options.has(dataFileSpec)) {
            dataFile = dataFileSpec.value(options);
        }

        checkNotNull(dataFile, "Data file not configured with %s", dataFileSpec);

        if (report || extract) {
            checkArgument(dataFile.exists(), "Data file %s does not exist", dataFile.getAbsolutePath());

            binaryResourceProvider = new CSVFileBinaryResourceProvider(dataFile, blobStore);
            if (binaryResourceProvider instanceof Closeable) {
                closer.register((Closeable) binaryResourceProvider);
            }

            stats = new BinaryStats(tikaConfigFile, binaryResourceProvider);
            String summary = stats.getSummary();
            log.info(summary);
        }

        if (generate) {
            String src = nodeStoreSpec.value(options);
            checkNotNull(blobStore,
                    "BlobStore found to be null. FileDataStore directory " + "must be specified via %s",
                    fdsDirSpec.options());
            checkNotNull(dataFile, "Data file path not provided");
            NodeStore nodeStore = bootStrapNodeStore(src, options.has(segmentTar), blobStore, closer);
            BinaryResourceProvider brp = new NodeStoreBinaryResourceProvider(nodeStore, blobStore);
            CSVFileGenerator generator = new CSVFileGenerator(dataFile);
            generator.generate(brp.getBinaries(path));
        }

        if (extract) {
            checkNotNull(storeDir, "Directory to store extracted text content " + "must be specified via %s",
                    storeDirSpec.options());
            checkNotNull(blobStore,
                    "BlobStore found to be null. FileDataStore directory " + "must be specified via %s",
                    fdsDirSpec.options());

            DataStoreTextWriter writer = new DataStoreTextWriter(storeDir, false);
            TextExtractor extractor = new TextExtractor(writer);

            if (options.has(poolSize)) {
                extractor.setThreadPoolSize(poolSize.value(options));
            }

            if (tikaConfigFile != null) {
                extractor.setTikaConfig(tikaConfigFile);
            }

            if (options.has(pathSpec)) {
                path = pathSpec.value(options);
            }

            closer.register(writer);
            closer.register(extractor);

            extractor.setStats(stats);
            log.info("Using path {}", path);
            extractor.extract(binaryResourceProvider.getBinaries(path));

            extractor.close();
            writer.close();
        }

    } catch (Throwable e) {
        throw closer.rethrow(e);
    } finally {
        closer.close();
    }
}

From source file:com.google.devtools.build.android.desugar.scan.KeepScanner.java

public static void main(String... args) throws Exception {
    OptionsParser parser = OptionsParser.newOptionsParser(KeepScannerOptions.class);
    parser.setAllowResidue(false);
    parser.enableParamsFileSupport(new ShellQuotedParamsFilePreProcessor(FileSystems.getDefault()));
    parser.parseAndExitUponError(args);
    KeepScannerOptions options = parser.getOptions(KeepScannerOptions.class);

    Map<String, ImmutableSet<KeepReference>> seeds;
    try (Closer closer = Closer.create()) {
        // TODO(kmb): Try to share more of this code with Desugar binary
        IndexedInputs classpath = new IndexedInputs(toRegisteredInputFileProvider(closer, options.classpath));
        IndexedInputs bootclasspath = new IndexedInputs(
                toRegisteredInputFileProvider(closer, options.bootclasspath));

        // Construct classloader from classpath.  Since we're assuming the prefix we're looking for
        // isn't part of the input itself we shouldn't need to include the input in the classloader.
        CoreLibraryRewriter noopRewriter = new CoreLibraryRewriter("");
        ClassLoader classloader = new HeaderClassLoader(classpath, noopRewriter,
                new HeaderClassLoader(bootclasspath, noopRewriter, new ThrowingClassLoader()));
        seeds = scan(checkNotNull(options.inputJars), options.prefix, classloader);
    }

    try (PrintStream out = new PrintStream(Files.newOutputStream(options.keepDest, CREATE),
            /*autoFlush=*/ false, "UTF-8")) {
        writeKeepDirectives(out, seeds);
    }
}

From source file:com.technobium.MultinomialLogisticRegression.java

public static void main(String[] args) throws Exception {
    // this test trains a 3-way classifier on the famous Iris dataset.
    // a similar exercise can be accomplished in R using this code:
    //    library(nnet)
    //    correct = rep(0,100)
    //    for (j in 1:100) {
    //      i = order(runif(150))
    //      train = iris[i[1:100],]
    //      test = iris[i[101:150],]
    //      m = multinom(Species ~ Sepal.Length + Sepal.Width + Petal.Length + Petal.Width, train)
    //      correct[j] = mean(predict(m, newdata=test) == test$Species)
    //    }
    //    hist(correct)
    //
    // Note that depending on the training/test split, performance can be better or worse.
    // There is about a 5% chance of getting accuracy < 90% and about 20% chance of getting accuracy
    // of 100%
    //
    // This test uses a deterministic split that is neither outstandingly good nor bad

    RandomUtils.useTestSeed();
    Splitter onComma = Splitter.on(",");

    // read the data
    List<String> raw = Resources.readLines(Resources.getResource("iris.csv"), Charsets.UTF_8);

    // holds features
    List<Vector> data = Lists.newArrayList();

    // holds target variable
    List<Integer> target = Lists.newArrayList();

    // for decoding target values
    Dictionary dict = new Dictionary();

    // for permuting data later
    List<Integer> order = Lists.newArrayList();

    for (String line : raw.subList(1, raw.size())) {
        // order gets a list of indexes
        order.add(order.size());

        // parse the predictor variables
        Vector v = new DenseVector(5);
        v.set(0, 1);
        int i = 1;
        Iterable<String> values = onComma.split(line);
        for (String value : Iterables.limit(values, 4)) {
            v.set(i++, Double.parseDouble(value));
        }
        data.add(v);

        // and the target
        target.add(dict.intern(Iterables.get(values, 4)));
    }

    // randomize the order ... original data has each species all together
    // note that this randomization is deterministic
    Random random = RandomUtils.getRandom();
    Collections.shuffle(order, random);

    // select training and test data
    List<Integer> train = order.subList(0, 100);
    List<Integer> test = order.subList(100, 150);
    logger.warn("Training set = {}", train);
    logger.warn("Test set = {}", test);

    // now train many times and collect information on accuracy each time
    int[] correct = new int[test.size() + 1];
    for (int run = 0; run < 200; run++) {
        OnlineLogisticRegression lr = new OnlineLogisticRegression(3, 5, new L2(1));
        // 30 training passes should converge to > 95% accuracy nearly always but never to 100%
        for (int pass = 0; pass < 30; pass++) {
            Collections.shuffle(train, random);
            for (int k : train) {
                lr.train(target.get(k), data.get(k));
            }
        }

        // check the accuracy on held out data
        int x = 0;
        int[] count = new int[3];
        for (Integer k : test) {
            Vector vt = lr.classifyFull(data.get(k));
            int r = vt.maxValueIndex();
            count[r]++;
            x += r == target.get(k) ? 1 : 0;
        }
        correct[x]++;

        if (run == 199) {

            Vector v = new DenseVector(5);
            v.set(0, 1);
            int i = 1;
            Iterable<String> values = onComma.split("6.0,2.7,5.1,1.6,versicolor");
            for (String value : Iterables.limit(values, 4)) {
                v.set(i++, Double.parseDouble(value));
            }

            Vector vt = lr.classifyFull(v);
            for (String value : dict.values()) {
                System.out.println("target:" + value);
            }
            int t = dict.intern(Iterables.get(values, 4));

            int r = vt.maxValueIndex();
            boolean flag = r == t;
            lr.close();

            Closer closer = Closer.create();

            try {
                FileOutputStream fileOutputStream = closer
                        .register(new FileOutputStream(new File("model.txt")));
                DataOutputStream dataOutputStream = closer
                        .register(new DataOutputStream(fileOutputStream));
                PolymorphicWritable.write(dataOutputStream, lr);
            } finally {
                closer.close();
            }
        }
    }

    // verify we never saw worse than 95% correct,
    for (int i = 0; i < Math.floor(0.95 * test.size()); i++) {
        System.out.println(String.format("%d trials had unacceptable accuracy of only %.0f%%: ", correct[i],
                100.0 * i / test.size()));
    }
    // nor perfect
    System.out.println(String.format("%d trials had unrealistic accuracy of 100%%", correct[test.size()]));
}

From source file:gobblin.metastore.util.StateStoreCleaner.java

public static void main(String[] args) throws IOException {
    if (args.length != 1) {
        System.err.println("Usage: " + StateStoreCleaner.class.getSimpleName() + " <configuration file>");
        System.exit(1);
    }

    Closer closer = Closer.create();
    try {
        Properties properties = new Properties();
        properties.load(closer.register(new FileInputStream(args[0])));
        closer.register(new StateStoreCleaner(properties)).run();
    } catch (Throwable t) {
        throw closer.rethrow(t);
    } finally {
        closer.close();
    }
}

From source file:gobblin.runtime.util.JobStateToJsonConverter.java

@SuppressWarnings("all")
public static void main(String[] args) throws Exception {
    Option sysConfigOption = Option.builder("sc").argName("system configuration file")
            .desc("Gobblin system configuration file").longOpt("sysconfig").hasArgs().build();
    Option storeUrlOption = Option.builder("u").argName("gobblin state store URL")
            .desc("Gobblin state store root path URL").longOpt("storeurl").hasArgs().required().build();
    Option jobNameOption = Option.builder("n").argName("gobblin job name").desc("Gobblin job name")
            .longOpt("name").hasArgs().required().build();
    Option jobIdOption = Option.builder("i").argName("gobblin job id").desc("Gobblin job id").longOpt("id")
            .hasArgs().build();
    Option convertAllOption = Option.builder("a")
            .desc("Whether to convert all past job states of the given job").longOpt("all").build();
    Option keepConfigOption = Option.builder("kc").desc("Whether to keep all configuration properties")
            .longOpt("keepConfig").build();
    Option outputToFile = Option.builder("t").argName("output file name").desc("Output file name")
            .longOpt("toFile").hasArgs().build();

    Options options = new Options();
    options.addOption(sysConfigOption);
    options.addOption(storeUrlOption);
    options.addOption(jobNameOption);
    options.addOption(jobIdOption);
    options.addOption(convertAllOption);
    options.addOption(keepConfigOption);
    options.addOption(outputToFile);

    CommandLine cmd = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cmd = parser.parse(options, args);
    } catch (ParseException pe) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("JobStateToJsonConverter", options);
        System.exit(1);
    }

    Properties sysConfig = new Properties();
    if (cmd.hasOption(sysConfigOption.getLongOpt())) {
        sysConfig = JobConfigurationUtils.fileToProperties(cmd.getOptionValue(sysConfigOption.getLongOpt()));
    }

    JobStateToJsonConverter converter = new JobStateToJsonConverter(sysConfig, cmd.getOptionValue('u'),
            cmd.hasOption("kc"));
    StringWriter stringWriter = new StringWriter();
    if (cmd.hasOption('i')) {
        converter.convert(cmd.getOptionValue('n'), cmd.getOptionValue('i'), stringWriter);
    } else {
        if (cmd.hasOption('a')) {
            converter.convertAll(cmd.getOptionValue('n'), stringWriter);
        } else {
            converter.convert(cmd.getOptionValue('n'), stringWriter);
        }
    }

    if (cmd.hasOption('t')) {
        Closer closer = Closer.create();
        try {
            FileOutputStream fileOutputStream = closer.register(new FileOutputStream(cmd.getOptionValue('t')));
            OutputStreamWriter outputStreamWriter = closer.register(
                    new OutputStreamWriter(fileOutputStream, ConfigurationKeys.DEFAULT_CHARSET_ENCODING));
            BufferedWriter bufferedWriter = closer.register(new BufferedWriter(outputStreamWriter));
            bufferedWriter.write(stringWriter.toString());
        } catch (Throwable t) {
            throw closer.rethrow(t);
        } finally {
            closer.close();
        }
    } else {
        System.out.println(stringWriter.toString());
    }
}

From source file:org.apache.gobblin.runtime.util.JobStateToJsonConverter.java

@SuppressWarnings("all")
public static void main(String[] args) throws Exception {
    Option sysConfigOption = Option.builder("sc").argName("system configuration file")
            .desc("Gobblin system configuration file (required if no state store URL specified)")
            .longOpt("sysconfig").hasArg().build();
    Option storeUrlOption = Option.builder("u").argName("gobblin state store URL")
            .desc("Gobblin state store root path URL (required if no sysconfig specified)").longOpt("storeurl")
            .hasArg().build();
    Option jobNameOption = Option.builder("n").argName("gobblin job name").desc("Gobblin job name (required)")
            .longOpt("name").hasArg().required().build();
    Option jobIdOption = Option.builder("i").argName("gobblin job id").desc("Gobblin job id").longOpt("id")
            .hasArg().build();
    Option convertAllOption = Option.builder("a")
            .desc("Whether to convert all past job states of the given job").longOpt("all").build();
    Option keepConfigOption = Option.builder("kc").desc("Whether to keep all configuration properties")
            .longOpt("keepConfig").build();
    Option outputToFile = Option.builder("t").argName("output file name").desc("Output file name")
            .longOpt("toFile").hasArg().build();

    Options options = new Options();
    options.addOption(sysConfigOption);
    options.addOption(storeUrlOption);
    options.addOption(jobNameOption);
    options.addOption(jobIdOption);
    options.addOption(convertAllOption);
    options.addOption(keepConfigOption);
    options.addOption(outputToFile);

    CommandLine cmd = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cmd = parser.parse(options, args);
    } catch (ParseException pe) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("JobStateToJsonConverter", options);
        System.exit(1);
    }

    if (!cmd.hasOption(sysConfigOption.getLongOpt()) && !cmd.hasOption(storeUrlOption.getLongOpt())) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("JobStateToJsonConverter", options);
        System.exit(1);
    }

    Properties sysConfig = new Properties();
    if (cmd.hasOption(sysConfigOption.getLongOpt())) {
        sysConfig = JobConfigurationUtils.fileToProperties(cmd.getOptionValue(sysConfigOption.getLongOpt()));
    }

    JobStateToJsonConverter converter = new JobStateToJsonConverter(sysConfig, cmd.getOptionValue('u'),
            cmd.hasOption("kc"));
    StringWriter stringWriter = new StringWriter();
    if (cmd.hasOption('i')) {
        converter.convert(cmd.getOptionValue('n'), cmd.getOptionValue('i'), stringWriter);
    } else {
        if (cmd.hasOption('a')) {
            converter.convertAll(cmd.getOptionValue('n'), stringWriter);
        } else {
            converter.convert(cmd.getOptionValue('n'), stringWriter);
        }
    }

    if (cmd.hasOption('t')) {
        Closer closer = Closer.create();
        try {
            FileOutputStream fileOutputStream = closer.register(new FileOutputStream(cmd.getOptionValue('t')));
            OutputStreamWriter outputStreamWriter = closer.register(
                    new OutputStreamWriter(fileOutputStream, ConfigurationKeys.DEFAULT_CHARSET_ENCODING));
            BufferedWriter bufferedWriter = closer.register(new BufferedWriter(outputStreamWriter));
            bufferedWriter.write(stringWriter.toString());
        } catch (Throwable t) {
            throw closer.rethrow(t);
        } finally {
            closer.close();
        }
    } else {
        System.out.println(stringWriter.toString());
    }
}

From source file:com.facebook.presto.util.PrestoCloseables.java

public static Closeable combineCloseables(Closeable first, Closeable second) {
    requireNonNull(first, "first is null");
    requireNonNull(second, "second is null");

    Closer closer = Closer.create();
    closer.register(first);
    closer.register(second);
    return closer;
}
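
A possible call site for the helper above (the demo class and the two streams are illustrative, not part of the Presto source): because Closer itself implements Closeable, the combined object can be handed to any code expecting a single Closeable, including try-with-resources, and closing it closes the registered resources in reverse registration order (second, then first).

import com.google.common.io.Closer;

import java.io.Closeable;
import java.io.FileInputStream;
import java.io.IOException;

public class CombineCloseablesDemo {

    // Same technique as combineCloseables above, inlined so the sketch is self-contained.
    static Closeable combine(Closeable first, Closeable second) {
        Closer closer = Closer.create();
        closer.register(first);
        closer.register(second);
        return closer;
    }

    public static void main(String[] args) throws IOException {
        FileInputStream first = new FileInputStream(args[0]);
        FileInputStream second = new FileInputStream(args[1]);
        // Closing 'both' closes 'second' first, then 'first'.
        try (Closeable both = combine(first, second)) {
            // ... read from either stream ...
        }
    }
}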

From source file:com.clank.launcher.LauncherUtils.java

public static Properties loadProperties(Class<?> clazz, String name, String extraProperty) throws IOException {
    Closer closer = Closer.create();
    Properties prop = new Properties();
    try {
        InputStream in = closer.register(clazz.getResourceAsStream(name));
        prop.load(in);
        String extraPath = System.getProperty(extraProperty);
        if (extraPath != null) {
            log.info("Loading extra properties for " + clazz.getCanonicalName() + ":" + name + " from "
                    + extraPath + "...");
            in = closer.register(new BufferedInputStream(closer.register(new FileInputStream(extraPath))));
            prop.load(in);
        }
    } finally {
        closer.close();
    }

    return prop;
}

From source file:org.glowroot.weaving.ClassLoaders.java

public static void defineClassesInBootstrapClassLoader(Collection<LazyDefinedClass> lazyDefinedClasses,
        Instrumentation instrumentation, File generatedJarFile) throws IOException {
    Closer closer = Closer.create();
    try {
        FileOutputStream out = closer.register(new FileOutputStream(generatedJarFile));
        JarOutputStream jarOut = closer.register(new JarOutputStream(out));
        generate(lazyDefinedClasses, jarOut);
    } catch (Throwable t) {
        throw closer.rethrow(t);
    } finally {
        closer.close();
    }
    instrumentation.appendToBootstrapClassLoaderSearch(new JarFile(generatedJarFile));
}