Example usage for com.google.common.collect Sets newHashSet

Introduction

This page collects example usages of com.google.common.collect.Sets.newHashSet, drawn from the open source projects listed below.

Prototype

public static <E> HashSet<E> newHashSet() 

Documentation

Creates a mutable, initially empty HashSet instance.
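
As a quick illustration of the prototype above, here is a minimal sketch in the same style as the project examples below. It assumes imports of com.google.common.collect.Sets and java.util.HashSet; the variable name names is ours for illustration, not taken from any of the listed projects.

public static void main(String[] args) {
    // The element type is inferred from the assignment target,
    // so no explicit type argument is required.
    HashSet<String> names = Sets.newHashSet();
    names.add("alpha");
    names.add("beta");
    // HashSet iteration order is not guaranteed.
    System.out.println(names);
}

On Java 7 and later, the diamond syntax makes new HashSet<>() an equivalent one-liner, and the Guava documentation itself recommends the constructor for new code; the factory method nevertheless remains widespread, as the examples below show.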

Usage

From source file: hu.akarnokd.experiments.DependencyVerify.java

public static void main(String[] args) {
    // Java 8
    Runnable r = () -> System.out.println("Hello world!");
    r.run();

    // Guava
    System.out.println(Sets.newHashSet());

    // Trove
    System.out.println(new TIntArrayList());

    // RxJava
    Observable.just(1).subscribe(System.out::println, Throwable::printStackTrace, r::run);
}

From source file: co.cask.cdap.common.conf.ConfigurationJsonTool.java

public static void main(String[] args) {

    String programName = System.getProperty("script", "ConfigurationJsonTool");
    Set<String> validArgument = Sets.newHashSet();
    validArgument.add(CDAP_CONFIG);
    validArgument.add(SECURITY_CONFIG);
    if (args.length != 1 || !(validArgument.contains(args[0]))) {
        System.err.println(String.format("Usage: %s (%s | %s)", programName, CDAP_CONFIG, SECURITY_CONFIG));
        System.exit(1);
    }
    exportToJson(args[0], System.out);
}

From source file: reconcile.hbase.query.ScanColumnQualifiers.java

public static void main(String[] args) {
    try {
        if (args.length < 2) {
            System.out.println("Usage: <table> [<column family>]+ ");
            System.out.println("Note: hbase config must be in classpath");
            return;
        }

        // First the setup work
        table = args[0];
        columnFamily = Sets.newHashSet();
        for (int i = 1; i < args.length; i++) {
            columnFamily.add(args[i]);
        }

        Configuration config = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(config);

        if (!admin.tableExists(table.getBytes())) {
            System.out.println("table does not exist: " + table);
            return;
        }

        HTable myTable = new HTable(config, table.getBytes());
        System.out.println("scanning full table:");
        Scan s = new Scan();
        for (String family : columnFamily) {
            s.addFamily(family.getBytes());
        }

        ResultScanner scanner = myTable.getScanner(s);
        printRow(scanner);

        // fin~
    } catch (MasterNotRunningException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }

}

From source file: co.cask.cdap.ui.ConfigurationJsonTool.java

public static void main(String[] args) {
    String programName = System.getProperty("script", "ConfigurationJsonTool");
    Set<String> validArgument = Sets.newHashSet();
    validArgument.add(CDAP_CONFIG);
    validArgument.add(SECURITY_CONFIG);
    if (args.length != 1 || !(validArgument.contains(args[0]))) {
        System.err.println(String.format("Usage: %s (%s | %s)", programName, CDAP_CONFIG, SECURITY_CONFIG));
        System.exit(1);
    }
    exportToJson(args[0], System.out);
}

From source file: org.pshdl.model.types.builtIn.busses.memorymodel.MemoryModel.java

public static void main(String[] args) throws Exception {
    final File file = new File(args[0]);
    final Set<Problem> problems = Sets.newHashSet();
    final Unit unit = MemoryModelAST.parseUnit(Files.toString(file, Charsets.UTF_8), problems, 0);
    System.out.println(unit);
    final List<Row> rows = buildRows(unit);
    final byte[] builtHTML = MemoryModelSideFiles.builtHTML(unit, rows, true);
    if (builtHTML == null)
        throw new IllegalArgumentException("buildHTML returned null");
    System.out.println(new BusAccess().generateAccessC(rows, true));
    System.out.println(new BusAccess().generateAccessH(unit, rows, true));
    // // SideFile[] cFiles = MemoryModelSideFiles.getCFiles(unit, rows);
    // for (SideFile sideFile : cFiles) {
    // System.out.println(sideFile.relPath);
    // System.out.println(new String(sideFile.contents));
    // }
    Files.write(builtHTML, new File(args[0] + "Map.html"));
    final HDLInterface hdi = buildHDLInterface(unit, rows);
    System.out.println(hdi);
}

From source file: com.wrmsr.nativity.x86.App.java

public static void main(String[] args) throws Exception {
    logger.info("hi");

    Document doc;
    try (InputStream is = App.class.getClassLoader().getResourceAsStream("x86reference.xml")) {
        DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
        dbFactory.setFeature("http://apache.org/xml/features/nonvalidating/load-dtd-grammar", false);
        dbFactory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
        doc = dBuilder.parse(is);
    }

    //optional, but recommended
    //read this - http://stackoverflow.com/questions/13786607/normalization-in-dom-parsing-with-java-how-does-it-work
    doc.getDocumentElement().normalize();

    List<Ref.Entry> entries = Lists.newArrayList();
    Ref.Parsing.parseRoot(doc, entries);
    ByteTrie<Ref.Entry> trie = DisImpl.buildTrie(entries);

    System.out.println(trie.toDetailedString());
    System.out.println();
    System.out.println();

    // Dis.run(trie);

    Ordering<Pair<Ref.Operand.Type, Ref.Operand.Address>> ord = Ordering.from((o1, o2) -> {
        int c = ObjectUtils.compare(o1.getLeft(), o2.getLeft());
        if (c == 0) {
            c = ObjectUtils.compare(o1.getRight(), o2.getRight());
        }
        return c;
    });

    Set<Pair<Ref.Operand.Type, Ref.Operand.Address>> set = Sets.newHashSet();
    for (Ref.Entry entry : entries) {
        for (Ref.Syntax syntax : entry.getSyntaxes()) {
            for (Ref.Operand operand : syntax.getOperands()) {
                set.add(new ImmutablePair<>(operand.type, operand.address));
            }
        }
    }
    for (Pair<Ref.Operand.Type, Ref.Operand.Address> pair : ord.sortedCopy(set)) {
        System.out.println(pair);
    }
    System.out.println("\n");

    DisImpl.run(trie);
}

From source file: org.elasticsearch.stresstest.get.MGetStress1.java

public static void main(String[] args) throws Exception {
    final int NUMBER_OF_NODES = 2;
    final int NUMBER_OF_DOCS = 50000;
    final int MGET_BATCH = 1000;

    Node[] nodes = new Node[NUMBER_OF_NODES];
    for (int i = 0; i < nodes.length; i++) {
        nodes[i] = NodeBuilder.nodeBuilder().node();
    }

    System.out.println("---> START Indexing initial data [" + NUMBER_OF_DOCS + "]");
    final Client client = nodes[0].client();
    for (int i = 0; i < NUMBER_OF_DOCS; i++) {
        client.prepareIndex("test", "type", Integer.toString(i)).setSource("field", "value").execute()
                .actionGet();
    }
    System.out.println("---> DONE Indexing initial data [" + NUMBER_OF_DOCS + "]");

    final AtomicBoolean done = new AtomicBoolean();
    // start indexer
    Thread indexer = new Thread(new Runnable() {
        @Override
        public void run() {
            while (!done.get()) {
                client.prepareIndex("test", "type",
                        Integer.toString(ThreadLocalRandom.current().nextInt(NUMBER_OF_DOCS)))
                        .setSource("field", "value").execute().actionGet();
            }
        }
    });
    indexer.start();
    System.out.println("---> Starting indexer");

    // start the mget one
    Thread mget = new Thread(new Runnable() {
        @Override
        public void run() {
            while (!done.get()) {
                Set<String> ids = Sets.newHashSet();
                for (int i = 0; i < MGET_BATCH; i++) {
                    ids.add(Integer.toString(ThreadLocalRandom.current().nextInt(NUMBER_OF_DOCS)));
                }
                //System.out.println("---> mget for [" + ids.size() + "]");
                MultiGetResponse response = client.prepareMultiGet().add("test", "type", ids).execute()
                        .actionGet();
                int expected = ids.size();
                int count = 0;
                for (MultiGetItemResponse item : response) {
                    count++;
                    if (item.isFailed()) {
                        System.err.println("item failed... " + item.getFailure());
                    } else {
                        boolean removed = ids.remove(item.getId());
                        if (!removed) {
                            System.err.println("got id twice " + item.getId());
                        }
                    }
                }
                if (expected != count) {
                    System.err.println("Expected [" + expected + "], got back [" + count + "]");
                }
            }
        }
    });
    mget.start();
    System.out.println("---> Starting mget");

    Thread.sleep(TimeValue.timeValueMinutes(10).millis());

    done.set(true);
}

From source file: org.gbif.refine.datasets.nhmd.RooftopBugs.java

public static void main(String[] args) throws IOException {
    // load list of all taxa
    names = loadTaxaList();
    LOG.info("Loaded " + names.size() + " unique canonical names.");

    // set of eventIDs
    events = Sets.newHashSet();

    // valid verified names not existing in GBIF Backbone Taxonomy (Nub)
    validColeopteraNamesNotInNub = Collections.unmodifiableSet(
            Sets.newHashSet("Acanthocinus griseus (Fabricius, 1792)", "Aphodius rufipes (Linnaeus, 1758)",
                    "Aphodius rufus (Moll, 1782)", "Aphodius sordidus (Fabricius, 1775)",
                    "Curculio glandium Marsham, 1802", "Curculio nucum Linnaeus, 1758",
                    "Dorytomus rufatus (Bedel, 1886)", "Dorytomus taeniatus (Fabricius, 1781)",
                    "Hylobius abietis (Linnaeus, 1758)", "Magdalis barbicornis (Latreille, 1804)",
                    "Magdalis ruficornis (Linnaeus, 1758)", "Phytobius leucogaster (Marsham, 1802)"));

    // create directory where files should be written to
    File output = org.gbif.utils.file.FileUtils.createTempDir();

    // first, process all Lepidoptera records (order is important)
    processLepidoptera(output);
    LOG.info("Processing Lepidoptera_1992-2009.csv complete! " + lepidopteraEventsFileName + " and "
            + lepidopteraOccurrencesFileName + " written to: " + output.getAbsolutePath());

    // second, process all Coleoptera records
    processColeoptera(output);
    LOG.info("Processing Coleoptera_1992-2009.csv complete! " + coleopteraOccurrencesFileName + " written to: "
            + output.getAbsolutePath());
}

From source file: org.apache.mahout.knn.Vectorize20NewsGroups.java

public static void main(String[] args) throws IOException {
    String weightingCode = args[0];
    boolean normalize = weightingCode.endsWith("c");

    legalHeaders = Sets.newHashSet();
    Iterables.addAll(legalHeaders,
            Iterables.transform(Splitter.on(",").trimResults().split(args[1]), new Function<String, String>() {
                @Override
                public String apply(String s) {
                    return s.toLowerCase();
                }
            }));

    includeQuotes = Boolean.parseBoolean(args[2]);

    CorpusWeighting cw = CorpusWeighting.parse(weightingCode);
    if (cw.needCorpusWeights()) {
        Multiset<String> wordFrequency = HashMultiset.create();
        Set<String> documents = Sets.newHashSet();
        for (String file : Arrays.asList(args).subList(4, args.length)) {
            recursivelyCount(documents, wordFrequency, new File(file));
        }
        cw.setCorpusCounts(wordFrequency, documents.size());
    }

    int dimension = Integer.parseInt(args[3]);

    Configuration conf = new Configuration();
    SequenceFile.Writer sf = SequenceFile.createWriter(FileSystem.getLocal(conf), conf, new Path("output"),
            Text.class, VectorWritable.class);
    PrintWriter csv = new PrintWriter("output.csv");
    for (String file : Arrays.asList(args).subList(4, args.length)) {
        recursivelyVectorize(csv, sf, new File(file), cw, normalize, dimension);
    }
    csv.close();
    sf.close();
}

From source file: org.apache.mahout.knn.tools.Vectorize20NewsGroups.java

public static void main(String[] args) throws IOException {
    String weightingCode = args[0];
    boolean normalize = weightingCode.endsWith("c");

    legalHeaders = Sets.newHashSet();
    Iterables.addAll(legalHeaders,
            Iterables.transform(Splitter.on(",").trimResults().split(args[1]), new Function<String, String>() {
                @Override
                public String apply(String s) {
                    return s.toLowerCase();
                }
            }));

    includeQuotes = Boolean.parseBoolean(args[2]);

    CorpusWeighting cw = CorpusWeighting.parse(weightingCode);
    if (cw.needCorpusWeights()) {
        Multiset<String> wordFrequency = HashMultiset.create();
        Set<String> documents = Sets.newHashSet();
        for (String file : Arrays.asList(args).subList(4, args.length)) {
            recursivelyCount(documents, wordFrequency, new File(file));
        }
        cw.setCorpusCounts(wordFrequency, documents.size());
    }

    int dimension = Integer.parseInt(args[3]);

    Configuration conf = new Configuration();
    SequenceFile.Writer sf = SequenceFile.createWriter(FileSystem.getLocal(conf), conf, new Path("output-file"),
            Text.class, VectorWritable.class);
    PrintWriter csv = new PrintWriter("output-file.csv");
    for (String file : Arrays.asList(args).subList(4, args.length)) {
        recursivelyVectorize(csv, sf, new File(file), cw, normalize, dimension);
    }
    csv.close();
    sf.close();
}