Example usage for com.google.common.base Optional or

List of usage examples for com.google.common.base Optional or

Introduction

This page collects usage examples for the or method of com.google.common.base.Optional.

Prototype

@Beta
public abstract T or(Supplier<? extends T> supplier);

Document

Returns the contained instance if it is present; supplier.get() otherwise.

Note that the prototype above is the @Beta Supplier overload. Optional declares two sibling overloads as well: or(T defaultValue), which returns the contained instance if present and defaultValue otherwise, and or(Optional<? extends T> secondChoice), which returns this Optional if present and secondChoice otherwise. Most of the examples below use or(T defaultValue), e.g. state.or(0).
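
To make the three variants concrete, here is a minimal, self-contained sketch; the class name, the sample values, and the expensiveDefault() helper are illustrative only:

import com.google.common.base.Optional;
import com.google.common.base.Supplier;

public class OptionalOrDemo {
    public static void main(String[] args) {
        Optional<Integer> present = Optional.of(5);
        Optional<Integer> absent = Optional.absent();

        // or(T defaultValue): eager default; the default must not be null
        System.out.println(present.or(0)); // 5
        System.out.println(absent.or(0));  // 0

        // or(Supplier<? extends T>): the default is computed lazily, only when absent
        Integer lazy = absent.or(new Supplier<Integer>() {
            @Override
            public Integer get() {
                return expensiveDefault();
            }
        });
        System.out.println(lazy); // 42

        // or(Optional<? extends T>): fall back to a second Optional
        System.out.println(absent.or(Optional.of(7))); // Optional.of(7)
    }

    private static Integer expensiveDefault() {
        return 42; // stands in for a costly computation
    }
}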

Usage

From source file:org.apache.spark.examples.streaming.JavaStatefulNetworkWordCount.java

public static void main(String[] args) {
    if (args.length < 2) {
        System.err.println("Usage: JavaStatefulNetworkWordCount <hostname> <port>");
        System.exit(1);
    }

    StreamingExamples.setStreamingLogLevels();

    // Update the cumulative count function
    final Function2<List<Integer>, Optional<Integer>, Optional<Integer>> updateFunction = new Function2<List<Integer>, Optional<Integer>, Optional<Integer>>() {
        @Override
        public Optional<Integer> call(List<Integer> values, Optional<Integer> state) {
            Integer newSum = state.or(0);
            for (Integer value : values) {
                newSum += value;
            }
            return Optional.of(newSum);
        }
    };

    // Create the context with a 1 second batch size
    SparkConf sparkConf = new SparkConf().setAppName("JavaStatefulNetworkWordCount");
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, Durations.seconds(1));
    ssc.checkpoint(".");

    // Initial RDD input to updateStateByKey
    List<Tuple2<String, Integer>> tuples = Arrays.asList(new Tuple2<String, Integer>("hello", 1),
            new Tuple2<String, Integer>("world", 1));
    JavaPairRDD<String, Integer> initialRDD = ssc.sc().parallelizePairs(tuples);

    JavaReceiverInputDStream<String> lines = ssc.socketTextStream(args[0], Integer.parseInt(args[1]),
            StorageLevels.MEMORY_AND_DISK_SER_2);

    JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
        @Override
        public Iterable<String> call(String x) {
            return Lists.newArrayList(SPACE.split(x));
        }
    });

    JavaPairDStream<String, Integer> wordsDstream = words
            .mapToPair(new PairFunction<String, String, Integer>() {
                @Override
                public Tuple2<String, Integer> call(String s) {
                    return new Tuple2<String, Integer>(s, 1);
                }
            });

    // This will give a DStream made of state (which is the cumulative count of the words)
    JavaPairDStream<String, Integer> stateDstream = wordsDstream.updateStateByKey(updateFunction,
            new HashPartitioner(ssc.sc().defaultParallelism()), initialRDD);

    stateDstream.print();
    ssc.start();
    ssc.awaitTermination();
}
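
The heart of this example, state.or(0), seeds the running sum with zero the first time a key is seen. The accumulation step itself is independent of Spark; a stripped-down sketch of just that logic (class and method names here are illustrative):

import com.google.common.base.Optional;

import java.util.Arrays;
import java.util.List;

public class UpdateFunctionDemo {
    public static void main(String[] args) {
        List<Integer> batchValues = Arrays.asList(1, 1, 1);

        // First batch for a key: no prior state, so the sum starts from 0
        System.out.println(update(batchValues, Optional.<Integer>absent())); // Optional.of(3)

        // A later batch for the same key: the prior count of 5 carries over
        System.out.println(update(batchValues, Optional.of(5))); // Optional.of(8)
    }

    static Optional<Integer> update(List<Integer> values, Optional<Integer> state) {
        Integer newSum = state.or(0); // 0 when the key has no previous count
        for (Integer value : values) {
            newSum += value;
        }
        return Optional.of(newSum);
    }
}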

From source file:com.naltel.spark.JavaStatefulNetworkWordCount.java

public static void main(String[] args) {
    if (args.length < 2) {
        System.err.println("Usage: JavaStatefulNetworkWordCount <hostname> <port>");
        System.exit(1);
    }

    StreamingExamples.setStreamingLogLevels();

    // Update the cumulative count function
    final Function2<List<Integer>, Optional<Integer>, Optional<Integer>> updateFunction = new Function2<List<Integer>, Optional<Integer>, Optional<Integer>>() {
        @Override
        public Optional<Integer> call(List<Integer> values, Optional<Integer> state) {
            Integer newSum = state.or(0);
            for (Integer value : values) {
                newSum += value;
            }
            return Optional.of(newSum);
        }
    };

    // Create the context with a 1 second batch size
    SparkConf sparkConf = new SparkConf().setAppName("JavaStatefulNetworkWordCount");
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, Durations.seconds(1));
    ssc.checkpoint(".");

    // Initial RDD input to updateStateByKey
    @SuppressWarnings("unchecked")
    List<Tuple2<String, Integer>> tuples = Arrays.asList(new Tuple2<String, Integer>("hello", 1),
            new Tuple2<String, Integer>("world", 1));
    JavaPairRDD<String, Integer> initialRDD = ssc.sc().parallelizePairs(tuples);

    JavaReceiverInputDStream<String> lines = ssc.socketTextStream(args[0], Integer.parseInt(args[1]),
            StorageLevels.MEMORY_AND_DISK_SER_2);

    JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
        @Override
        public Iterable<String> call(String x) {
            return Lists.newArrayList(SPACE.split(x));
        }
    });

    @SuppressWarnings("serial")
    JavaPairDStream<String, Integer> wordsDstream = words
            .mapToPair(new PairFunction<String, String, Integer>() {
                @Override
                public Tuple2<String, Integer> call(String s) {
                    return new Tuple2<String, Integer>(s, 1);
                }
            });

    // This will give a DStream made of state (which is the cumulative count of the words)
    // JavaPairDStream<String, Integer> stateDstream = wordsDstream.updateStateByKey(updateFunction,
    //         new HashPartitioner(ssc.sc().defaultParallelism()), initialRDD);

    // stateDstream.print();
    // NOTE: with the state DStream and its print() commented out, this context has no
    // registered output operation, so ssc.start() below fails at runtime.
    ssc.start();
    ssc.awaitTermination();
}

From source file:com.weibangong.spark.streaming.JavaStatefulNetworkWordCount.java

public static void main(String[] args) {
    if (args.length < 2) {
        System.err.println("Usage: JavaStatefulNetworkWordCount <hostname> <port>");
        System.exit(1);
    }

    // Create the context with a 1 second batch size
    SparkConf sparkConf = new SparkConf().setAppName("JavaStatefulNetworkWordCount");
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, Durations.seconds(1));
    ssc.checkpoint(".");

    // Initial state RDD input to mapWithState
    @SuppressWarnings("unchecked")
    List<Tuple2<String, Integer>> tuples = Arrays.asList(new Tuple2<String, Integer>("hello", 1),
            new Tuple2<String, Integer>("world", 1));
    JavaPairRDD<String, Integer> initialRDD = ssc.sparkContext().parallelizePairs(tuples);

    JavaReceiverInputDStream<String> lines = ssc.socketTextStream(args[0], Integer.parseInt(args[1]),
            StorageLevels.MEMORY_AND_DISK_SER_2);

    JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
        @Override
        public Iterable<String> call(String x) {
            return Lists.newArrayList(SPACE.split(x));
        }
    });

    JavaPairDStream<String, Integer> wordsDstream = words
            .mapToPair(new PairFunction<String, String, Integer>() {
                @Override
                public Tuple2<String, Integer> call(String s) {
                    return new Tuple2<String, Integer>(s, 1);
                }
            });

    // Update the cumulative count function
    final Function3<String, Optional<Integer>, State<Integer>, Tuple2<String, Integer>> mappingFunc = new Function3<String, Optional<Integer>, State<Integer>, Tuple2<String, Integer>>() {

        @Override
        public Tuple2<String, Integer> call(String word, Optional<Integer> one, State<Integer> state) {
            int sum = one.or(0) + (state.exists() ? state.get() : 0);
            Tuple2<String, Integer> output = new Tuple2<String, Integer>(word, sum);
            state.update(sum);
            return output;
        }
    };

    // DStream of cumulative counts that get updated in every batch
    JavaMapWithStateDStream<String, Integer, Integer, Tuple2<String, Integer>> stateDstream = wordsDstream
            .mapWithState(StateSpec.function(mappingFunc).initialState(initialRDD));

    stateDstream.print();
    ssc.start();
    ssc.awaitTermination();
}

From source file:com.sdw.dream.spark.examples.streaming.JavaStatefulNetworkWordCount.java

public static void main(String[] args) {
    if (args.length < 2) {
        System.err.println("Usage: JavaStatefulNetworkWordCount <hostname> <port>");
        System.exit(1);
    }

    StreamingExamples.setStreamingLogLevels();

    // Create the context with a 1 second batch size
    SparkConf sparkConf = new SparkConf().setAppName("JavaStatefulNetworkWordCount");
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, Durations.seconds(1));
    ssc.checkpoint(".");

    // Initial state RDD input to mapWithState
    @SuppressWarnings("unchecked")
    List<Tuple2<String, Integer>> tuples = Arrays.asList(new Tuple2<String, Integer>("hello", 1),
            new Tuple2<String, Integer>("world", 1));
    JavaPairRDD<String, Integer> initialRDD = ssc.sparkContext().parallelizePairs(tuples);

    JavaReceiverInputDStream<String> lines = ssc.socketTextStream(args[0], Integer.parseInt(args[1]),
            StorageLevels.MEMORY_AND_DISK_SER_2);

    JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
        @Override
        public Iterable<String> call(String x) {
            return Lists.newArrayList(SPACE.split(x));
        }
    });

    JavaPairDStream<String, Integer> wordsDstream = words
            .mapToPair(new PairFunction<String, String, Integer>() {
                @Override
                public Tuple2<String, Integer> call(String s) {
                    return new Tuple2<String, Integer>(s, 1);
                }
            });

    // Update the cumulative count function
    final Function3<String, Optional<Integer>, State<Integer>, Tuple2<String, Integer>> mappingFunc = new Function3<String, Optional<Integer>, State<Integer>, Tuple2<String, Integer>>() {

        @Override
        public Tuple2<String, Integer> call(String word, Optional<Integer> one, State<Integer> state) {
            int sum = one.or(0) + (state.exists() ? state.get() : 0);
            Tuple2<String, Integer> output = new Tuple2<String, Integer>(word, sum);
            state.update(sum);
            return output;
        }
    };

    // DStream of cumulative counts that get updated in every batch
    JavaMapWithStateDStream<String, Integer, Integer, Tuple2<String, Integer>> stateDstream = wordsDstream
            .mapWithState(StateSpec.function(mappingFunc).initialState(initialRDD));

    stateDstream.print();
    ssc.start();
    ssc.awaitTermination();
}

From source file:com.erix.streaming.OpenCVFeatureCount.java

public static void main(String[] args) {
    if (args.length < 1) {
        System.err.println("Usage: OpenCVFeatureCount <nats url>");
        System.exit(1);
    }
    String nats_url = args[0];

    final NatsClient nc = new NatsClient(nats_url);
    System.out.println("About to connect to nats server at : " + nats_url);

    // Update the cumulative count function
    final Function2<List<Integer>, Optional<Integer>, Optional<Integer>> updateFunction = new Function2<List<Integer>, Optional<Integer>, Optional<Integer>>() {
        @Override
        public Optional<Integer> call(List<Integer> values, Optional<Integer> state) {
            Integer newSum = state.or(0);
            for (Integer value : values) {
                newSum += value;
            }
            return Optional.of(newSum);
        }
    };

    //nc.Connect(nats_url);
    //nc.Subscribe("foo");
    //nc.Publish("foo", "Java Nats Client");
    //StreamingExamples.setStreamingLogLevels();

    // Create the context with a 1 second batch size
    SparkConf sparkConf = new SparkConf().setAppName("OpenCVStatefulFeatureCount");
    JavaStreamingContext ssc = new JavaStreamingContext(sparkConf, Durations.seconds(1));
    ssc.checkpoint("./ck");

    // Initial RDD input to updateStateByKey
    List<Tuple2<String, Integer>> tuples = Arrays.asList(new Tuple2<String, Integer>("0", 0),
            new Tuple2<String, Integer>("0", 0));
    JavaPairRDD<String, Integer> initialRDD = ssc.sc().parallelizePairs(tuples);

    JavaReceiverInputDStream<String> lines = ssc.receiverStream(nc);
    JavaDStream<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
        @Override
        public Iterable<String> call(String x) {
            //System.out.println("Recevied x:"+x);
            String[] ins = SPACE.split(x.replace("\"", ""));
            //for (int i=0;i<ins.length ;i++ ) {
            //  ins[i]=ins[i].replace("\"","");
            //}
            return Lists.newArrayList(ins);
        }
    });
    JavaPairDStream<String, Integer> wordsDstream = words
            .mapToPair(new PairFunction<String, String, Integer>() {
                @Override
                public Tuple2<String, Integer> call(String s) {
                    return new Tuple2<String, Integer>(s, 1);
                }
            });

    // This will give a DStream made of state (which is the cumulative count of the words)
    JavaPairDStream<String, Integer> stateDstream = wordsDstream.updateStateByKey(updateFunction,
            new HashPartitioner(ssc.sc().defaultParallelism()), initialRDD);

    stateDstream.print();
    stateDstream.foreachRDD(new Function2<JavaPairRDD<String, Integer>, Time, Void>() {
        @Override
        public Void call(JavaPairRDD<String, Integer> rdd, Time time) throws IOException {
            //String counts = "Counts at time " + time + " " + rdd.collect();
            //System.out.println(counts);
            nc.Publish("bar", rdd.collect().toString());
            return null;
        }
    });
    ssc.start();
    ssc.awaitTermination();
}

From source file:com.sparkz.streamcount.WordCount.java

public static void main(String[] args) {

    SparkConf config = new SparkConf();
    config.setAppName("Word Count");
    Duration batchDuration = new Duration(1000);
    JavaSparkContext ctx = new JavaSparkContext(config);
    JavaSparkContext.jarOfClass(org.apache.spark.streaming.State.class);
    JavaSparkContext.jarOfClass(org.apache.spark.streaming.StateSpec.class);
    ctx.addFile("/home/cloudera/Downloads/spark-streaming_2.10-1.6.0.jar");
    JavaStreamingContext jssc = new JavaStreamingContext(ctx, batchDuration);
    jssc.checkpoint(".");
    final int threshold = Integer.parseInt(args[0]);

    // Initial state RDD input to mapWithState
    @SuppressWarnings("unchecked")
    List<Tuple2<String, Integer>> tuples = Arrays.asList(new Tuple2<String, Integer>("hello", 1),
            new Tuple2<String, Integer>("world", 1));
    JavaPairRDD<String, Integer> initialRDD = jssc.sparkContext().parallelizePairs(tuples);

    JavaReceiverInputDStream<String> lines = jssc.socketTextStream("127.0.0.1", 37337,
            StorageLevels.MEMORY_AND_DISK_SER_2);

    // split each document into words
    JavaDStream<String> tokenized = lines.flatMap(new FlatMapFunction<String, String>() {
        private static final long serialVersionUID = 1L;

        @Override
        public Iterable<String> call(String s) {
            return Arrays.asList(SPACE.split(s));
        }
    });

    // count the occurrence of each word
    JavaPairDStream<String, Integer> wordsDstream = tokenized
            .mapToPair(new PairFunction<String, String, Integer>() {
                private static final long serialVersionUID = 1L;

                @Override
                public Tuple2<String, Integer> call(String s) {
                    return new Tuple2<String, Integer>(s, 1);
                }
            });

    // Update the cumulative count function
    final Function3<String, Optional<Integer>, State<Integer>, Tuple2<String, Integer>> mappingFunc = new Function3<String, Optional<Integer>, State<Integer>, Tuple2<String, Integer>>() {

        private static final long serialVersionUID = 1L;

        @Override
        public Tuple2<String, Integer> call(String word, Optional<Integer> one, State<Integer> state) {
            int sum = one.or(0) + (state.exists() ? state.get() : 0);
            Tuple2<String, Integer> output = new Tuple2<String, Integer>(word, sum);
            state.update(sum);
            return output;
        }
    };

    // DStream of cumulative counts that get updated in every batch
    JavaMapWithStateDStream<String, Integer, Integer, Tuple2<String, Integer>> stateDstream = wordsDstream
            .mapWithState(StateSpec.function(mappingFunc).initialState(initialRDD));

    stateDstream.print();

    JavaDStream<Tuple2<String, Integer>> filteredStream = stateDstream
            .filter(new Function<Tuple2<String, Integer>, Boolean>() {

                private static final long serialVersionUID = 1L;

                @Override
                public Boolean call(Tuple2<String, Integer> state) throws Exception {
                    return state._2 > threshold;
                }
            });

    filteredStream.print();

    jssc.start();
    jssc.awaitTermination();

    jssc.close();

}

From source file:org.metastatic.treediff.Main.java

public static void main(String... argv) throws Exception {
    Optional<Command> command = Optional.absent();
    LongOpt[] longOpts = new LongOpt[] { new LongOpt("checksum", LongOpt.NO_ARGUMENT, null, CHECKSUM),
            new LongOpt("diff", LongOpt.NO_ARGUMENT, null, DIFF),
            new LongOpt("patch", LongOpt.NO_ARGUMENT, null, PATCH),
            new LongOpt("hash", LongOpt.REQUIRED_ARGUMENT, null, 'h'),
            new LongOpt("hash-length", LongOpt.REQUIRED_ARGUMENT, null, 'l'),
            new LongOpt("sums-file", LongOpt.REQUIRED_ARGUMENT, null, 's'),
            new LongOpt("diff-file", LongOpt.REQUIRED_ARGUMENT, null, 'd'),
            new LongOpt("output", LongOpt.REQUIRED_ARGUMENT, null, 'o'),
            new LongOpt("verbose", LongOpt.NO_ARGUMENT, null, 'v'),
            new LongOpt("strict-hash", LongOpt.NO_ARGUMENT, null, 'H'),
            new LongOpt("size-only", LongOpt.NO_ARGUMENT, null, 'S'),
            new LongOpt("help", LongOpt.NO_ARGUMENT, null, HELP),
            new LongOpt("version", LongOpt.NO_ARGUMENT, null, VERSION) };
    Optional<MessageDigest> hash = Optional.absent();
    Optional<Integer> hashLength = Optional.absent();
    Optional<String> inputFile = Optional.absent();
    Optional<String> outputFile = Optional.absent();
    Optional<DiffCheck> diffCheck = Optional.of(DiffCheck.SizeAndTime);
    Getopt getopt = new Getopt(Main.class.getName(), argv, "h:l:s:d:o:vH", longOpts);
    int ch;
    while ((ch = getopt.getopt()) != -1) {
        switch (ch) {
        case CHECKSUM:
            if (command.isPresent()) {
                System.err.printf("%s: only specify one command.%n", Main.class.getName());
                System.exit(1);
                return;
            }
            command = Optional.of(Command.Checksum);
            break;

        case DIFF:
            if (command.isPresent()) {
                System.err.printf("%s: only specify one command.%n", Main.class.getName());
                System.exit(1);
                return;
            }
            command = Optional.of(Command.Diff);
            break;

        case PATCH:
            if (command.isPresent()) {
                System.err.printf("%s: only specify one command.%n", Main.class.getName());
                System.exit(1);
                return;
            }
            command = Optional.of(Command.Patch);
            break;

        case HELP:
            help();
            System.exit(0);
            break;

        case VERSION:
            version();
            System.exit(0);
            break;

        case 'h':
            checkCommand("--hash", command, EnumSet.of(Command.Checksum));
            try {
                hash = Optional.of(MessageDigest.getInstance(getopt.getOptarg()));
            } catch (NoSuchAlgorithmException nsae) {
                try {
                    hash = Optional.of(MessageDigest.getInstance(getopt.getOptarg(), new JarsyncProvider()));
                } catch (NoSuchAlgorithmException nsae2) {
                    System.err.printf("%s: no such hash: %s%n", Main.class.getName(), getopt.getOptarg());
                    System.exit(1);
                    return;
                }
            }
            break;

        case 'l':
            checkCommand("--hash-length", command, EnumSet.of(Command.Checksum));
            try {
                hashLength = Optional.of(Integer.parseInt(getopt.getOptarg()));
            } catch (NumberFormatException nfe) {
                System.err.printf("%s: --hash-length: invalid number.%n", Main.class.getName());
                System.exit(1);
                return;
            }
            break;

        case 's':
            checkCommand("--sums-file", command, EnumSet.of(Command.Diff));
            inputFile = Optional.of(getopt.getOptarg());
            break;

        case 'd':
            checkCommand("--diff-file", command, EnumSet.of(Command.Patch));
            inputFile = Optional.of(getopt.getOptarg());
            break;

        case 'o':
            checkCommand("--output", command, EnumSet.of(Command.Checksum, Command.Diff));
            outputFile = Optional.of(getopt.getOptarg());
            break;

        case 'v':
            verbosity++;
            break;

        case 'H':
            checkCommand("--strict-hash", command, EnumSet.of(Command.Diff));
            diffCheck = Optional.of(DiffCheck.StrictHash);
            break;

        case 'S':
            checkCommand("--size-only", command, EnumSet.of(Command.Diff));
            diffCheck = Optional.of(DiffCheck.SizeOnly);
            break;

        case '?':
            System.err.printf("Try `%s --help' for more info.%n", Main.class.getName());
            System.exit(1);
            return;
        }
    }

    if (!command.isPresent()) {
        System.err.printf("%s: must supply a command.%n", Main.class.getName());
        System.exit(1);
        return;
    }

    switch (command.get()) {
    case Checksum: {
        if (!hash.isPresent())
            hash = Optional.of(MessageDigest.getInstance("Murmur3", new JarsyncProvider()));
        if (!hashLength.isPresent())
            hashLength = Optional.of(hash.get().getDigestLength());
        else if (hashLength.get() <= 0 || hashLength.get() > hash.get().getDigestLength()) {
            System.err.printf("%s: invalid hash length: %d.%n", Main.class.getName(), hashLength.get());
            System.exit(1);
            return;
        }
        if (!outputFile.isPresent()) {
            System.err.printf("%s: --output argument required.%n", Main.class.getName());
            System.exit(1);
            return;
        }
        if (getopt.getOptind() >= argv.length) {
            System.err.printf("%s: must specify at least one file or directory.%n", Main.class.getName());
            System.exit(1);
            return;
        }
        DataOutputStream output = new DataOutputStream(
                new BufferedOutputStream(new FileOutputStream(outputFile.get())));
        output.write(SUMS_MAGIC);
        String alg = hash.get().getAlgorithm();
        output.writeUTF(alg);
        output.writeInt(hashLength.get());
        checksum(hash.get(), hashLength.get(), output,
                Arrays.asList(argv).subList(getopt.getOptind(), argv.length));
        output.close();
        break;
    }

    case Diff: {
        if (!inputFile.isPresent()) {
            System.err.printf("%s: --diff: option --sums-file required.%n", Main.class.getName());
            System.exit(1);
            return;
        }
        if (!outputFile.isPresent()) {
            System.err.printf("%s: --output argument required.%n", Main.class.getName());
            System.exit(1);
            return;
        }
        DataInputStream input = new DataInputStream(new FileInputStream(inputFile.get()));
        byte[] magic = new byte[8];
        input.readFully(magic);
        if (!Arrays.equals(magic, SUMS_MAGIC)) {
            System.err.printf("%s: %s: invalid file header.%n", Main.class.getName(), inputFile.get());
            System.exit(1);
            return;
        }
        String alg = input.readUTF();
        try {
            hash = Optional.of(MessageDigest.getInstance(alg, new JarsyncProvider()));
        } catch (NoSuchAlgorithmException nsae) {
            hash = Optional.of(MessageDigest.getInstance(alg));
        }
        hashLength = Optional.of(input.readInt());
        if (hashLength.get() <= 0 || hashLength.get() > hash.get().getDigestLength()) {
            System.err.printf("%s: invalid hash length: %d.%n", Main.class.getName(), hashLength.get());
            System.exit(1);
            return;
        }
        DataOutputStream output = new DataOutputStream(
                new BufferedOutputStream(new FileOutputStream(outputFile.get())));
        output.write(DIFF_MAGIC);
        output.writeUTF(alg);
        output.writeInt(hashLength.get());
        diff(hash.get(), hashLength.get(), input, output, diffCheck.or(DiffCheck.SizeAndTime));
        output.close();
        break;
    }

    case Patch:
        throw new Error("not yet implemented");
    }
}

From source file:io.urmia.util.ArgumentParseUtil.java

public static String getZooKeeperURL(String[] args) {
    Optional<String> zkConfig = getArgument(args, "-z", "--zk");
    return zkConfig.or(DEFAULT_ZK_SERVER);
}

From source file:org.jmingo.util.DocumentUtils.java

public static Object getIdValue(Object document) {
    Object value = null;
    Validate.notNull(document, "getIdValue: object to get id cannot be null");
    List<Field> currFields = getFields(document.getClass());
    Optional<Field> fieldOptional = getIdField(currFields);
    Field field = fieldOptional.or(getIdField(getInheritedFields(document.getClass()))).orNull();
    if (field != null) {
        field.setAccessible(true);
        try {
            value = field.get(document);
        } catch (IllegalAccessException e) {
            throw Throwables.propagate(e);
        }
    }
    return value;
}
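
The key line in getIdValue chains the or(Optional) overload with orNull(): declared id field, else inherited id field, else null. A minimal sketch of that chaining pattern, with hypothetical values standing in for the two field lookups:

import com.google.common.base.Optional;

public class FirstPresentDemo {
    public static void main(String[] args) {
        // Stand-ins for getIdField(currFields) and getIdField(inheritedFields)
        Optional<String> declared = Optional.absent();
        Optional<String> inherited = Optional.of("id");

        // or(Optional) keeps the first present value; orNull() yields null if both are absent
        String field = declared.or(inherited).orNull();
        System.out.println(field); // "id"
    }
}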

From source file:org.liquigraph.cli.LiquigraphCli.java

private static void printVersion() {
    Optional<String> version = getVersion();
    System.out.println(version.or("Unknown version!"));
}