Example usage for com.google.common.base Preconditions checkArgument

Introduction

This page lists example usages of com.google.common.base Preconditions checkArgument.

Prototype

public static void checkArgument(boolean expression) 

Document

Ensures the truth of an expression involving one or more parameters to the calling method.
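For context, checkArgument(boolean) throws an IllegalArgumentException when the expression is false; Guava also provides overloads that attach an error message (checkArgument(boolean, Object) and checkArgument(boolean, String, Object...)). A minimal sketch, with illustrative names not taken from the examples below:

import com.google.common.base.Preconditions;

public class CheckArgumentDemo {

    // Illustrative helper: rejects negative inputs before computing.
    static double sqrt(double value) {
        Preconditions.checkArgument(value >= 0.0); // throws IllegalArgumentException if false
        return Math.sqrt(value);
    }

    public static void main(String[] args) {
        System.out.println(sqrt(9.0)); // 3.0
        sqrt(-1.0);                    // throws IllegalArgumentException
    }
}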

Usage

From source file:org.apache.streams.sysomos.provider.SysomosProvider.java

/**
 * To use from command line:
 * <p/>
 * Supply configuration similar to src/test/resources/sysomos.conf
 * <p/>
 * Launch using:
 * <p/>
 * mvn exec:java -Dexec.mainClass=org.apache.streams.sysomos.provider.SysomosProvider -Dexec.args="sysomos.conf articles.json"
 * @param args args
 * @throws Exception Exception
 */
public static void main(String[] args) throws Exception {

    Preconditions.checkArgument(args.length >= 2);

    String configfile = args[0];
    String outfile = args[1];

    Config reference = ConfigFactory.load();
    File file = new File(configfile);
    assert file.exists(); // note: a no-op unless the JVM runs with -ea; parseFileAnySyntax below fails anyway if the file is missing
    Config testResourceConfig = ConfigFactory.parseFileAnySyntax(file,
            ConfigParseOptions.defaults().setAllowMissing(false));

    Config typesafe = testResourceConfig.withFallback(reference).resolve();

    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
    SysomosConfiguration config = new ComponentConfigurator<>(SysomosConfiguration.class)
            .detectConfiguration(typesafe, "sysomos");
    SysomosProvider provider = new SysomosProvider(config);

    ObjectMapper mapper = StreamsJacksonMapper.getInstance();

    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
    provider.prepare(config);
    provider.startStream();
    do {
        Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(),
                TimeUnit.MILLISECONDS);
        for (StreamsDatum datum : provider.readCurrent()) {
            String json;
            try {
                json = mapper.writeValueAsString(datum.getDocument());
                outStream.println(json);
            } catch (JsonProcessingException ex) {
                System.err.println(ex.getMessage());
            }
        }
    } while (provider.isRunning());
    provider.cleanUp();
    outStream.flush();
}

From source file:org.apache.streams.rss.provider.RssStreamProvider.java

/**
 * To use from command line:
 * <p/>
 * Supply configuration similar to src/test/resources/rss.conf
 * <p/>
 * Launch using:
 * <p/>
 * mvn exec:java -Dexec.mainClass=org.apache.streams.rss.provider.RssStreamProvider -Dexec.args="rss.conf articles.json"
 * @param args args
 * @throws Exception Exception
 */
public static void main(String[] args) throws Exception {

    Preconditions.checkArgument(args.length >= 2);

    String configfile = args[0];
    String outfile = args[1];

    Config reference = ConfigFactory.load();
    File file = new File(configfile);
    assert file.exists(); // note: a no-op unless the JVM runs with -ea; parseFileAnySyntax below fails anyway if the file is missing
    Config testResourceConfig = ConfigFactory.parseFileAnySyntax(file,
            ConfigParseOptions.defaults().setAllowMissing(false));

    Config typesafe = testResourceConfig.withFallback(reference).resolve();

    StreamsConfiguration streamsConfiguration = StreamsConfigurator.detectConfiguration(typesafe);
    RssStreamConfiguration config = new ComponentConfigurator<>(RssStreamConfiguration.class)
            .detectConfiguration(typesafe, "rss");
    RssStreamProvider provider = new RssStreamProvider(config);

    ObjectMapper mapper = StreamsJacksonMapper.getInstance();

    PrintStream outStream = new PrintStream(new BufferedOutputStream(new FileOutputStream(outfile)));
    provider.prepare(config);
    provider.startStream();
    do {
        Uninterruptibles.sleepUninterruptibly(streamsConfiguration.getBatchFrequencyMs(),
                TimeUnit.MILLISECONDS);
        for (StreamsDatum datum : provider.readCurrent()) {
            String json;
            try {
                json = mapper.writeValueAsString(datum.getDocument());
                outStream.println(json);
            } catch (JsonProcessingException ex) {
                System.err.println(ex.getMessage());
            }
        }
    } while (provider.isRunning());
    provider.cleanUp();
    outStream.flush();
}

From source file:org.apache.fluo.mini.MiniFluoImpl.java

public static void main(String[] args) {

    try {
        if (args.length != 1) {
            System.err.println("Usage: MiniFluoImpl <fluoPropsPath>");
            System.exit(-1);
        }
        String propsPath = args[0];
        Objects.requireNonNull(propsPath);
        File propsFile = new File(propsPath);
        if (!propsFile.exists()) {
            System.err.println("ERROR - Fluo properties file does not exist: " + propsPath);
            System.exit(-1);
        }
        Preconditions.checkArgument(propsFile.exists());

        FluoConfiguration config = new FluoConfiguration(propsFile);
        if (!config.hasRequiredMiniFluoProps()) {
            log.error("Failed to start MiniFluo - fluo.properties is missing required properties for "
                    + "MiniFluo");
            System.exit(-1);
        }
        try (MiniFluo mini = new MiniFluoImpl(config)) {
            log.info("MiniFluo is running");

            while (true) {
                UtilWaitThread.sleep(1000);
            }
        }
    } catch (Exception e) {
        log.error("Exception running MiniFluo: ", e);
    }

    log.info("MiniFluo is exiting.");
}

From source file:com.cloudera.util.StatusHttpServer.java

/**
 * Test harness to get precompiled jsps working.
 *
 * @param argv name, path, and port, in that order
 */
public static void main(String[] argv) {
    Preconditions.checkArgument(argv.length == 3);
    String name = argv[0];
    String path = argv[1];
    int port = Integer.parseInt(argv[2]);

    try {
        StatusHttpServer http = new StatusHttpServer(name, path, "0.0.0.0", port, false);
        http.start();
    } catch (IOException ioe) {
        ioe.printStackTrace();
    }
}

From source file:com.yahoo.spaclu.data.index.IndexFeatureValueSpark.java

public static void main(String[] args) throws IOException {
    IndexFeatureValueOptions optionsFormatRawToDatabase = new IndexFeatureValueOptions(args);

    String inputPathString = optionsFormatRawToDatabase.getInputPath();
    String outputPathString = optionsFormatRawToDatabase.getOutputPath();
    String indexPathString = optionsFormatRawToDatabase.getIndexPath();
    int numberOfPartitions = optionsFormatRawToDatabase.getNumberOfPartitions();
    int maxCutoffThreshold = optionsFormatRawToDatabase.getMaximumCutoffThreshold();
    int minCutoffThreshold = optionsFormatRawToDatabase.getMinimumCutoffThreshold();

    /*
     * Set<String> excludingFeatureNames = new HashSet<String>();
     * excludingFeatureNames.add("login");
     * excludingFeatureNames.add("time"); excludingFeatureNames.add("day");
     * excludingFeatureNames.add("hms"); excludingFeatureNames.add("fail");
     */

    sLogger.info("Tool: " + IndexFeatureValueSpark.class.getSimpleName());
    sLogger.info(" - input path: " + inputPathString);
    sLogger.info(" - output path: " + outputPathString);
    sLogger.info(" - index path: " + indexPathString);
    sLogger.info(" - number of partitions: " + numberOfPartitions);
    sLogger.info(" - maximum cutoff: " + maxCutoffThreshold);
    sLogger.info(" - minimum cutoff: " + minCutoffThreshold);

    // Create a default hadoop configuration
    Configuration conf = new Configuration();

    // Get an HDFS FileSystem handle from the configuration
    FileSystem fs = FileSystem.get(conf);

    Path outputPath = new Path(outputPathString);
    if (fs.exists(outputPath)) {
        fs.delete(outputPath, true);
    }

    SparkConf sparkConf = new SparkConf().setAppName(optionsFormatRawToDatabase.toString());

    JavaSparkContext sc = new JavaSparkContext(sparkConf);

    Map<Integer, String> featureIndices = getFeatureIndices(sc.textFile(indexPathString));

    List<Integer> listOfAllFeatureIndices = new LinkedList<Integer>();
    List<String> listOfAllFeatureInfo = new LinkedList<String>();
    Iterator<Integer> indexIter = featureIndices.keySet().iterator();
    while (indexIter.hasNext()) {
        Integer tempKey = indexIter.next();
        listOfAllFeatureIndices.add(tempKey);
        listOfAllFeatureInfo.add(featureIndices.get(tempKey));
    }

    JavaRDD<String> rawLines = sc.textFile(inputPathString).repartition(numberOfPartitions);

    JavaRDD<String[]> tokenizedLines = rawLines.map(new LineFilter(listOfAllFeatureIndices));
    JavaPairRDD<Entry<Integer, String>, Long> featureValuesCounts = tokenizedLines
            .flatMapToPair(new FeatureValueMapper()).reduceByKey(new FeatureValueReducer());

    Map<Integer, Builder<String, Long>> featureValueMapping = new Hashtable<Integer, Builder<String, Long>>();
    Iterator<Tuple2<Entry<Integer, String>, Long>> iter = featureValuesCounts.collect().iterator();
    while (iter.hasNext()) {
        Tuple2<Entry<Integer, String>, Long> temp = iter.next();
        Entry<Integer, String> featureValueEntry = temp._1;
        int featureIndex = featureValueEntry.getKey();
        String featureValue = featureValueEntry.getValue();
        long featureValueCount = temp._2;

        if (!featureValueMapping.containsKey(featureIndex)) {
            Builder<String, Long> mapBuilder = new Builder<String, Long>(Ordering.natural());

            featureValueMapping.put(featureIndex, mapBuilder);
        }

        featureValueMapping.get(featureIndex).put(featureValue, featureValueCount);
    }

    Preconditions.checkArgument(featureValueMapping.size() == listOfAllFeatureIndices.size());

    String outputFeaturePathString = outputPathString + "feature" + Settings.SEPERATOR;
    fs.mkdirs(new Path(outputFeaturePathString));

    String outputFeatureNamePathString = outputPathString + "feature.dat";
    Path outputFeatureNamePath = new Path(outputFeatureNamePathString);
    PrintWriter featureNamePrinterWriter = new PrintWriter(fs.create(outputFeatureNamePath), true);

    List<Integer> listOfFeatureIndicesToKeep = new LinkedList<Integer>();

    Map<Integer, Map<String, Integer>> featureValueIndex = new Hashtable<Integer, Map<String, Integer>>();
    for (int d = 0; d < featureValueMapping.size(); d++) {
        Map<String, Integer> valueToIndex = new Hashtable<String, Integer>();
        Map<Integer, String> indexToValue = new Hashtable<Integer, String>();

        ImmutableSortedMap<String, Long> immutableSortedMap = featureValueMapping.get(d).build();
        for (String keyString : immutableSortedMap.keySet()) {
            valueToIndex.put(keyString, valueToIndex.size());
            indexToValue.put(indexToValue.size(), keyString);
        }

        if (valueToIndex.size() <= minCutoffThreshold || valueToIndex.size() > maxCutoffThreshold) {
            sLogger.info("Feature (" + listOfAllFeatureInfo.get(d) + ") contains " + valueToIndex.size()
                    + " values, skip...");

            continue;
        } else {
            sLogger.info("Feature (" + listOfAllFeatureInfo.get(d) + ") contains " + valueToIndex.size()
                    + " values.");

            listOfFeatureIndicesToKeep.add(listOfAllFeatureIndices.get(d));
            featureNamePrinterWriter.println(listOfAllFeatureInfo.get(d));
        }

        String outputFeatureIndexPathString = outputFeaturePathString + "index" + Settings.UNDER_SCORE
                + featureValueIndex.size() + ".dat";
        Path outputIndexPath = new Path(outputFeatureIndexPathString);

        featureValueIndex.put(featureValueIndex.size(), valueToIndex);

        PrintWriter featureValueIndexPrinterWriter = new PrintWriter(fs.create(outputIndexPath), true);
        for (int i = 0; i < indexToValue.size(); i++) {
            featureValueIndexPrinterWriter.println("" + i + Settings.TAB + indexToValue.get(i) + Settings.TAB
                    + immutableSortedMap.get(indexToValue.get(i)));
        }
        featureValueIndexPrinterWriter.close();
    }

    featureNamePrinterWriter.close();

    JavaRDD<String[]> filteredLines = rawLines.map(new LineFilter(listOfFeatureIndicesToKeep));
    JavaRDD<FeatureIntegerVector> indexedData = filteredLines.map(new FeatureValueIndexer(featureValueIndex));

    String outputDataPathString = outputPathString + "data";
    Path outputDataPath = new Path(outputDataPathString);
    if (fs.exists(outputDataPath)) {
        fs.delete(outputDataPath, true);
    }
    indexedData.saveAsTextFile(outputDataPathString);

    sc.stop();
}

From source file:org.opendaylight.controller.sal.binding.impl.forward.DomForwardingUtils.java

public static void reuseForwardingFrom(Object target, Object source) {
    Preconditions.checkArgument(isDomForwardedBroker(source));
    Preconditions.checkArgument(isDomForwardedBroker(target));
    DomForwardedBroker forwardedSource = (DomForwardedBroker) source;
    DomForwardedBroker forwardedTarget = (DomForwardedBroker) target;
    reuseForwardingFrom(forwardedTarget, forwardedSource);
}

From source file:org.opendaylight.openflowplugin.testcommon.DropTestUtils.java

public static String macToString(final byte[] mac) {
    Preconditions.checkArgument(mac.length == 6);

    final StringBuilder sb = new StringBuilder(17);
    appendByte(sb, mac[0]);

    for (int i = 1; i < mac.length; i++) {
        sb.append(':');
        appendByte(sb, mac[i]);
    }

    return sb.toString();
}
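The appendByte helper is not part of this excerpt, so the method is not runnable as shown. A minimal stand-in (an assumption for illustration, not the plugin's actual implementation) that appends two zero-padded lowercase hex digits:

private static void appendByte(final StringBuilder sb, final byte b) {
    // Hypothetical helper assumed by the excerpt above: mask to an
    // unsigned value and format as two lowercase hex digits.
    sb.append(String.format("%02x", b & 0xff));
}

With that stand-in, macToString(new byte[] {0x00, 0x1a, 0x2b, 0x3c, 0x4d, 0x5e}) would return "00:1a:2b:3c:4d:5e".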

From source file:org.opendaylight.protocol.util.StatisticsUtil.java

/**
 * Formats elapsed time in seconds to the form days:hours:minutes:seconds.
 *
 * @param seconds Elapsed time in seconds.
 * @return Formatted time as string d:hh:mm:ss
 */
public static String formatElapsedTime(final long seconds) {
    Preconditions.checkArgument(seconds >= 0);
    return String.format("%1d:%02d:%02d:%02d", TimeUnit.SECONDS.toDays(seconds),
            TimeUnit.SECONDS.toHours(seconds) - TimeUnit.DAYS.toHours(TimeUnit.SECONDS.toDays(seconds)),
            TimeUnit.SECONDS.toMinutes(seconds) - TimeUnit.HOURS.toMinutes(TimeUnit.SECONDS.toHours(seconds)),
            seconds - TimeUnit.MINUTES.toSeconds(TimeUnit.SECONDS.toMinutes(seconds)));
}
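As a quick usage check (values worked out by hand from the format string above, not taken from the source): 90061 seconds is one day, one hour, one minute, and one second.

// 90061 s = 86400 (1 day) + 3600 (1 hour) + 60 (1 minute) + 1
String formatted = StatisticsUtil.formatElapsedTime(90061L);
System.out.println(formatted); // prints 1:01:01:01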

From source file:org.opendaylight.controller.cluster.datastore.messages.AbortTransaction.java

public static AbortTransaction fromSerializable(Object serializable) {
    Preconditions.checkArgument(serializable instanceof AbortTransaction);
    return (AbortTransaction) serializable;
}

From source file:org.opendaylight.controller.cluster.datastore.messages.CommitTransaction.java

public static CommitTransaction fromSerializable(Object serializable) {
    Preconditions.checkArgument(serializable instanceof CommitTransaction);
    return (CommitTransaction) serializable;
}