Example usage for com.google.common.collect Multimap keySet

List of usage examples for com.google.common.collect Multimap keySet

Introduction

On this page you can find example usages of com.google.common.collect Multimap keySet.

Prototype

Set<K> keySet();

Source Link

Document

Returns a view collection of all distinct keys contained in this multimap.

Usage

From source file:com.ironiacorp.network.tool.nf.NetworkFreud.java

/**
 * Entry point: parses MAC/IP tuples from an arpwatch dump, loads them into an
 * ArpDatabase, and prints every IP address the database flags as bogus,
 * followed by each associated MAC address on an indented line.
 */
public static void main(String[] args) {
    ArpDatabase database = new ArpDatabase();
    ArpwatchParser parser = new ArpwatchParser();

    // Feed every parsed (MAC, IP) pair into the database.
    for (Tuple<MacAddress, InetAddress> tuple : parser.parse()) {
        database.add(tuple.getT(), tuple.getU());
    }

    // Print each flagged IP, then all MACs recorded for it (tab-indented).
    Multimap<InetAddress, MacAddress> bogusIP = database.findBogusIP();
    for (InetAddress ip : bogusIP.keySet()) {
        System.out.println(ip.toString());
        for (MacAddress mac : bogusIP.get(ip)) {
            System.out.println("\t" + mac.toString());
        }
    }
}

From source file:org.apache.cassandra.tools.SSTableExpiredBlockers.java

/**
 * Command-line tool entry point: for a given keyspace/table, opens every
 * non-temporary sstable on disk, then prints which "blocker" sstables prevent
 * expired sstables from being dropped, based on the table's gc_grace_seconds.
 *
 * @param args at least two arguments; the last two are interpreted as
 *             keyspace and table (any leading arguments are ignored)
 * @throws IOException on I/O failure while listing or opening sstables
 */
public static void main(String[] args) throws IOException {
    PrintStream out = System.out;
    if (args.length < 2) {
        out.println("Usage: sstableexpiredblockers <keyspace> <table>");
        System.exit(1);
    }

    Util.initDatabaseDescriptor();

    // Take keyspace/table from the end of the argument list.
    String keyspace = args[args.length - 2];
    String columnfamily = args[args.length - 1];
    Schema.instance.loadFromDisk(false);

    CFMetaData metadata = Schema.instance.getCFMetaData(keyspace, columnfamily);
    if (metadata == null)
        throw new IllegalArgumentException(
                String.format("Unknown keyspace/table %s.%s", keyspace, columnfamily));

    // Open the keyspace without loading sstables; we list and open them ourselves,
    // skipping temporary (in-flight) sstables.
    Keyspace ks = Keyspace.openWithoutSSTables(keyspace);
    ColumnFamilyStore cfs = ks.getColumnFamilyStore(columnfamily);
    Directories.SSTableLister lister = cfs.directories.sstableLister().skipTemporary(true);
    Set<SSTableReader> sstables = new HashSet<>();
    for (Map.Entry<Descriptor, Set<Component>> sstable : lister.list().entrySet()) {
        if (sstable.getKey() != null) {
            try {
                SSTableReader reader = SSTableReader.open(sstable.getKey());
                sstables.add(reader);
            } catch (Throwable t) {
                // Best effort: report unreadable sstables and continue with the rest.
                out.println("Couldn't open sstable: " + sstable.getKey().filenameFor(Component.DATA) + " ("
                        + t.getMessage() + ")");
            }
        }
    }
    if (sstables.isEmpty()) {
        out.println("No sstables for " + keyspace + "." + columnfamily);
        System.exit(1);
    }

    // gcBefore = "now" (epoch seconds) minus the table's gc grace period.
    int gcBefore = (int) (System.currentTimeMillis() / 1000) - metadata.getGcGraceSeconds();
    Multimap<SSTableReader, SSTableReader> blockers = checkForExpiredSSTableBlockers(sstables, gcBefore);
    // One line per blocker: the blocker, how many expired sstables it holds up, and which ones.
    for (SSTableReader blocker : blockers.keySet()) {
        out.println(String.format("%s blocks %d expired sstables from getting dropped: %s%n",
                formatForExpiryTracing(Collections.singleton(blocker)), blockers.get(blocker).size(),
                formatForExpiryTracing(blockers.get(blocker))));
    }

    System.exit(0);
}

From source file:com.ikanow.aleph2.analytics.spark.assets.SparkSqlTopology.java

/**
 * Spark job entry point: initializes the Aleph2 analytics context from the
 * command line, registers the batch inputs as SQL tables, runs the configured
 * SQL string, and emits every row of the result back through the Aleph2
 * context as a JSON object (one field per result column).
 *
 * Errors never propagate: any throwable is caught and logged to stdout.
 *
 * @param args Aleph2 bootstrap arguments, parsed by SparkTechnologyUtils
 */
public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {

    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        // Optional: make really really sure the process exits after the specified test timeout.
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        // Build the job config from the job's config map; the SQL script defaults to "".
        final SparkTopologyConfigBean config = BeanTemplateUtils
                .from(context.getJob().map(job -> job.config()).orElse(Collections.emptyMap()),
                        SparkTopologyConfigBean.class)
                .get();
        final String sql_string = Optional.ofNullable(config.script()).orElse("");

        //INFO:
        System.out.println("Starting SparkSqlTopology");

        SparkConf spark_context = new SparkConf().setAppName("SparkSqlTopology");

        // Sub-sampling ratio: test key when a test spec is present, normal key otherwise;
        // only values > 0 are honored (getDouble returns the -1 default when unset).
        final Optional<Double> sub_sample = test_spec
                .map(__ -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_TEST, -1)))
                .orElseGet(() -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_NORMAL, -1)))
                .filter(d -> d > 0);

        //INFO: log the effective options before starting the job
        sub_sample.ifPresent(d -> System.out.println("OPTIONS: sub_sample = " + d));
        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));
        System.out.println("OPTIONS: sql = " + sql_string);

        //DEBUG
        //final boolean test_mode = test_spec.isPresent(); // (serializable thing i can pass into the map)

        try (final JavaSparkContext jsc = new JavaSparkContext(spark_context)) {

            SQLContext sql_context = new SQLContext(jsc);

            // Registers each input as a SQL-queryable table keyed by name.
            final Multimap<String, DataFrame> inputs = SparkTechnologyUtils.buildBatchSparkSqlInputs(context,
                    test_spec, sql_context, Collections.emptySet());

            //INFO
            System.out.println("Registered tables = " + inputs.keySet().toString());

            final DataFrame filtered_df = sql_context.sql(sql_string);
            final String[] columns = filtered_df.columns(); // (have to do this here because columns() depends on transient code)

            // Emit each result row as a JSON object with one field per column;
            // count() forces the job to run and yields the number written.
            final long written = filtered_df.javaRDD().map(row -> {
                final ObjectNode j = _mapper.createObjectNode(); //.put("message", row.toString()); (Don't think we want this now that we're using the columns)
                for (int ii = 0; ii < row.length(); ++ii) {
                    j.set(columns[ii], _mapper.convertValue(row.get(ii), JsonNode.class));
                }
                return context.emitObject(Optional.empty(), context.getJob().get(), Either.left(j),
                        Optional.empty());
            }).count();

            //INFO:
            System.out.println("Wrote: data_objects=" + written);
        }
    } catch (Throwable t) {
        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));
    }
}

From source file:com.github.rinde.vanlon15prima.PerformExperiment.java

/**
 * Runs the benchmark experiment locally: evaluates a central cheapest-insertion
 * configuration, a central random solver, and an auction-based multi-agent
 * configuration on the scenario dataset, then writes one CSV file of
 * per-simulation statistics for each configuration.
 *
 * @param args forwarded to {@code Experiment.Builder.perform(..)}
 */
public static void main(String[] args) {
    final long time = System.currentTimeMillis();
    final Experiment.Builder experimentBuilder = Experiment.build(SUM).computeLocal().withRandomSeed(123)
            .withThreads(Runtime.getRuntime().availableProcessors()).repeat(1)
            .addScenarios(FileProvider.builder().add(Paths.get(DATASET)).filter("glob:**[09].scen"))
            .addResultListener(new CommandLineProgress(System.out))
            .usePostProcessor(PostProcessors.statisticsPostProcessor())
            // central: cheapest insertion configuration
            .addConfiguration(
                    Central.solverConfiguration(CheapestInsertionHeuristic.supplier(SUM), "CheapInsert"))
            // central: random
            .addConfiguration(Central.solverConfiguration(RandomSolver.supplier()))
            // mas: auction cheapest insertion with 2-opt per vehicle
            .addConfiguration(MASConfiguration.pdptwBuilder().setName("Auction-R-opt2cih-B-cih")
                    .addEventHandler(AddVehicleEvent.class, new VehicleHandler(
                            SolverRoutePlanner.supplier(
                                    Opt2.breadthFirstSupplier(CheapestInsertionHeuristic.supplier(SUM), SUM)),
                            SolverBidder.supplier(SUM, CheapestInsertionHeuristic.supplier(SUM))))
                    .addModel(SolverModel.builder()).addModel(AuctionCommModel.builder()).build());

    final Optional<ExperimentResults> results = experimentBuilder.perform(System.out, args);
    final long duration = System.currentTimeMillis() - time;
    // No results produced (perform returned absent): nothing to report or write.
    if (!results.isPresent()) {
        return;
    }

    System.out.println("Done, computed " + results.get().getResults().size() + " simulations in "
            + duration / 1000d + "s");

    // Group sorted results by configuration so each configuration gets its own CSV file.
    final Multimap<MASConfiguration, SimulationResult> groupedResults = LinkedHashMultimap.create();
    for (final SimulationResult sr : results.get().sortedResults()) {
        groupedResults.put(sr.getSimArgs().getMasConfig(), sr);
    }

    for (final MASConfiguration config : groupedResults.keySet()) {
        final Collection<SimulationResult> group = groupedResults.get(config);

        final File configResult = new File(RESULTS + config.getName() + ".csv");
        try {
            Files.createParentDirs(configResult);
        } catch (final IOException e1) {
            throw new IllegalStateException(e1);
        }
        // deletes the file in case it already exists
        configResult.delete();
        try {
            Files.append(
                    "dynamism,urgency,scale,cost,travel_time,tardiness,over_time,is_valid,scenario_id,random_seed,comp_time,num_vehicles,num_orders\n",
                    configResult, Charsets.UTF_8);
        } catch (final IOException e1) {
            throw new IllegalStateException(e1);
        }

        for (final SimulationResult sr : group) {
            final String pc = sr.getSimArgs().getScenario().getProblemClass().getId();
            final String id = sr.getSimArgs().getScenario().getProblemInstanceId();
            final int numVehicles = FluentIterable.from(sr.getSimArgs().getScenario().getEvents())
                    .filter(AddVehicleEvent.class).size();
            try {
                // Scenario metadata (dynamism, urgency, scale) is read from a
                // sibling "<problemClass>-<instanceId>.properties" file in DATASET.
                final String scenarioName = Joiner.on("-").join(pc, id);
                final List<String> propsStrings = Files
                        .readLines(new File(DATASET + scenarioName + ".properties"), Charsets.UTF_8);
                final Map<String, String> properties = Splitter.on("\n").withKeyValueSeparator(" = ")
                        .split(Joiner.on("\n").join(propsStrings));

                final double dynamism = Double.parseDouble(properties.get("dynamism_bin"));
                final long urgencyMean = Long.parseLong(properties.get("urgency"));
                final double scale = Double.parseDouble(properties.get("scale"));

                final StatisticsDTO stats = (StatisticsDTO) sr.getResultObject();
                final double cost = SUM.computeCost(stats);
                final double travelTime = SUM.travelTime(stats);
                final double tardiness = SUM.tardiness(stats);
                final double overTime = SUM.overTime(stats);
                final boolean isValidResult = SUM.isValidResult(stats);
                final long computationTime = stats.computationTime;

                final long numOrders = Long.parseLong(properties.get("AddParcelEvent"));

                // One CSV row per simulation; column order matches the header written above.
                final String line = Joiner.on(",")
                        .appendTo(new StringBuilder(),
                                asList(dynamism, urgencyMean, scale, cost, travelTime, tardiness, overTime,
                                        isValidResult, scenarioName, sr.getSimArgs().getRandomSeed(),
                                        computationTime, numVehicles, numOrders))
                        .append(System.lineSeparator()).toString();
                if (!isValidResult) {
                    System.err.println("WARNING: FOUND AN INVALID RESULT: ");
                    System.err.println(line);
                }
                Files.append(line, configResult, Charsets.UTF_8);
            } catch (final IOException e) {
                throw new IllegalStateException(e);
            }
        }
    }
}

From source file:PCC.java

/**
 * @param args the command line arguments
 * @throws java.io.IOException/*  w  ww . j a  v  a  2 s  .  co  m*/
 */

public static void main(String[] args) throws IOException {
    // TODO code application logic here

    PearsonsCorrelation corel = new PearsonsCorrelation();
    PCC method = new PCC();
    ArrayList<String> name = new ArrayList<>();
    Multimap<String, String> genes = ArrayListMultimap.create();
    BufferedWriter bw = new BufferedWriter(new FileWriter(args[1]));
    BufferedReader br = new BufferedReader(new FileReader(args[0]));
    String str;
    while ((str = br.readLine()) != null) {
        String[] a = str.split("\t");
        name.add(a[0]);
        for (int i = 1; i < a.length; i++) {
            genes.put(a[0], a[i]);
        }
    }
    for (String key : genes.keySet()) {
        double[] first = new double[genes.get(key).size()];
        int element1 = 0;
        for (String value : genes.get(key)) {
            double d = Double.parseDouble(value);
            first[element1] = d;
            element1++;
        }
        for (String key1 : genes.keySet()) {
            if (!key.equals(key1)) {
                double[] second = new double[genes.get(key1).size()];
                int element2 = 0;
                for (String value : genes.get(key1)) {
                    double d = Double.parseDouble(value);
                    second[element2] = d;
                    element2++;

                }
                double corrlation = corel.correlation(first, second);
                if (corrlation > 0.5) {
                    bw.write(key + "\t" + key1 + "\t" + corrlation + "\t"
                            + method.pvalue(corrlation, second.length) + "\n");
                }
            }
        }
    }
    br.close();
    bw.close();
}

From source file:com.github.rinde.dynurg.Experimentation.java

/**
 * Runs the distributed dynamism/urgency experiment: nine central solver
 * configurations (cheapest insertion plus bfs/dfs 2-opt variants, each under
 * three objectives) over the scenario dataset, then writes one CSV file of
 * per-simulation statistics for each configuration.
 *
 * @param args command-line options, handled via the experiment CLI menu
 */
public static void main(String[] args) {
    System.out.println(System.getProperty("jppf.config"));

    final long time = System.currentTimeMillis();
    final Experiment.Builder experimentBuilder = Experiment.build(SUM).computeDistributed().withRandomSeed(123)
            .repeat(10).numBatches(10)
            .addScenarios(
                    FileProvider.builder().add(Paths.get(DATASET)).filter("glob:**[01].[0-9]0#[0-5].scen"))
            .addResultListener(new CommandLineProgress(System.out))
            .addConfiguration(
                    Central.solverConfiguration(CheapestInsertionHeuristic.supplier(SUM), "-CheapInsert"))
            .addConfiguration(Central.solverConfiguration(CheapestInsertionHeuristic.supplier(TARDINESS),
                    "-CheapInsert-Tard"))
            .addConfiguration(Central.solverConfiguration(CheapestInsertionHeuristic.supplier(DISTANCE),
                    "-CheapInsert-Dist"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.breadthFirstSupplier(CheapestInsertionHeuristic.supplier(SUM), SUM),
                    "-bfsOpt2-CheapInsert"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.breadthFirstSupplier(CheapestInsertionHeuristic.supplier(TARDINESS), TARDINESS),
                    "-bfsOpt2-CheapInsert-Tard"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.breadthFirstSupplier(CheapestInsertionHeuristic.supplier(DISTANCE), DISTANCE),
                    "-bfsOpt2-CheapInsert-Dist"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.depthFirstSupplier(CheapestInsertionHeuristic.supplier(SUM), SUM),
                    "-dfsOpt2-CheapInsert"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.depthFirstSupplier(CheapestInsertionHeuristic.supplier(TARDINESS), TARDINESS),
                    "-dfsOpt2-CheapInsert-Tard"))
            .addConfiguration(Central.solverConfiguration(
                    Opt2.depthFirstSupplier(CheapestInsertionHeuristic.supplier(DISTANCE), DISTANCE),
                    "-dfsOpt2-CheapInsert-Dist"));

    // CLI menu: exposes experiment options plus an extra -nv/--number-of-vehicles
    // option that overrides the vehicle count in all scenarios.
    final Menu m = ExperimentCli.createMenuBuilder(experimentBuilder)
            .add(Option.builder("nv", ArgumentParser.INTEGER).longName("number-of-vehicles")
                    .description("Changes the number of vehicles in all scenarios.").build(), experimentBuilder,
                    new ArgHandler<Experiment.Builder, Integer>() {
                        @Override
                        public void execute(Experiment.Builder subject, Optional<Integer> argument) {
                            subject.setScenarioReader(new NumVehiclesScenarioParser(argument.get()));
                        }
                    })
            .build();

    final Optional<String> error = m.safeExecute(args);
    if (error.isPresent()) {
        System.err.println(error.get());
        return;
    }
    final ExperimentResults results = experimentBuilder.perform();

    final long duration = System.currentTimeMillis() - time;
    System.out
            .println("Done, computed " + results.results.size() + " simulations in " + duration / 1000d + "s");

    // Group sorted results by configuration so each configuration gets its own CSV file.
    final Multimap<MASConfiguration, SimulationResult> groupedResults = LinkedHashMultimap.create();
    for (final SimulationResult sr : results.sortedResults()) {
        groupedResults.put(sr.masConfiguration, sr);
    }

    for (final MASConfiguration config : groupedResults.keySet()) {
        final Collection<SimulationResult> group = groupedResults.get(config);

        final File configResult = new File(RESULTS + config.toString() + ".csv");
        try {
            Files.createParentDirs(configResult);
        } catch (final IOException e1) {
            throw new IllegalStateException(e1);
        }
        // deletes the file in case it already exists
        configResult.delete();
        try {
            Files.append(
                    "dynamism,urgency_mean,urgency_sd,cost,travel_time,tardiness,over_time,is_valid,scenario_id,random_seed,comp_time,num_vehicles\n",
                    configResult, Charsets.UTF_8);
        } catch (final IOException e1) {
            throw new IllegalStateException(e1);
        }

        for (final SimulationResult sr : group) {
            final String pc = sr.scenario.getProblemClass().getId();
            final String id = sr.scenario.getProblemInstanceId();
            final int numVehicles = FluentIterable.from(sr.scenario.asList()).filter(AddVehicleEvent.class)
                    .size();
            try {
                // Scenario metadata is read from "files/dataset/<pc><id>.properties".
                final List<String> propsStrings = Files
                        .readLines(new File("files/dataset/" + pc + id + ".properties"), Charsets.UTF_8);
                final Map<String, String> properties = Splitter.on("\n").withKeyValueSeparator(" = ")
                        .split(Joiner.on("\n").join(propsStrings));

                final double dynamism = Double.parseDouble(properties.get("dynamism"));
                final double urgencyMean = Double.parseDouble(properties.get("urgency_mean"));
                final double urgencySd = Double.parseDouble(properties.get("urgency_sd"));

                final double cost = SUM.computeCost(sr.stats);
                final double travelTime = SUM.travelTime(sr.stats);
                final double tardiness = SUM.tardiness(sr.stats);
                final double overTime = SUM.overTime(sr.stats);
                final boolean isValidResult = SUM.isValidResult(sr.stats);
                final long computationTime = sr.stats.computationTime;

                // One CSV row per simulation; column order matches the header written above.
                final String line = Joiner.on(",")
                        .appendTo(new StringBuilder(),
                                asList(dynamism, urgencyMean, urgencySd, cost, travelTime, tardiness, overTime,
                                        isValidResult, pc + id, sr.seed, computationTime, numVehicles))
                        .append(System.lineSeparator()).toString();
                if (!isValidResult) {
                    System.err.println("WARNING: FOUND AN INVALID RESULT: ");
                    System.err.println(line);
                }
                Files.append(line, configResult, Charsets.UTF_8);
            } catch (final IOException e) {
                throw new IllegalStateException(e);
            }
        }
    }
}

From source file:org.apache.ctakes.temporal.data.analysis.PrintInconsistentAnnotations.java

/**
 * Scans gold-standard temporal annotations and reports narrative containers
 * whose contained events carry mutually inconsistent DocTimeRel values. Each
 * offending container is printed to stderr with surrounding document context,
 * followed by an overall inconsistency percentage.
 *
 * @param args parsed into Options (patient sets, raw text dir, XMI dir)
 * @throws Exception if reading or processing any CAS fails
 */
public static void main(String[] args) throws Exception {
    Options options = CliFactory.parseArguments(Options.class, args);
    int windowSize = 50; // chars of context printed around each inconsistency

    List<Integer> patientSets = options.getPatients().getList();
    List<Integer> trainItems = THYMEData.getPatientSets(patientSets, THYMEData.TRAIN_REMAINDERS);
    List<File> files = THYMEData.getFilesFor(trainItems, options.getRawTextDirectory());

    // Pipeline: read document text from URIs, then load the matching XMI annotations.
    CollectionReader reader = UriCollectionReader.getCollectionReaderFromFiles(files);
    AggregateBuilder aggregateBuilder = new AggregateBuilder();
    aggregateBuilder.add(UriToDocumentTextAnnotator.getDescription());
    aggregateBuilder.add(AnalysisEngineFactory.createEngineDescription(XMIReader.class,
            XMIReader.PARAM_XMI_DIRECTORY, options.getXMIDirectory()));

    int totalDocTimeRels = 0;
    int totalInconsistentDocTimeRels = 0;
    for (Iterator<JCas> casIter = new JCasIterator(reader, aggregateBuilder.createAggregate()); casIter
            .hasNext();) {
        JCas jCas = casIter.next();
        String text = jCas.getDocumentText();
        JCas goldView = jCas.getView("GoldView");

        // group events by their narrative container
        Multimap<Annotation, EventMention> containers = HashMultimap.create();
        for (TemporalTextRelation relation : JCasUtil.select(goldView, TemporalTextRelation.class)) {
            if (relation.getCategory().equals("CONTAINS")) {
                Annotation arg1 = relation.getArg1().getArgument();
                Annotation arg2 = relation.getArg2().getArgument();
                if (arg2 instanceof EventMention) {
                    EventMention event = (EventMention) arg2;
                    containers.put(arg1, event);
                }
            }
        }

        // check each container for inconsistent DocTimeRels
        for (Annotation container : containers.keySet()) {
            // Distinct DocTimeRel values among the container's events.
            Set<String> docTimeRels = Sets.newHashSet();
            for (EventMention event : containers.get(container)) {
                docTimeRels.add(event.getEvent().getProperties().getDocTimeRel());
            }
            totalDocTimeRels += docTimeRels.size();

            boolean inconsistentDocTimeRels;
            if (container instanceof EventMention) {
                // Event container: contained events must match the container's
                // DocTimeRel, except BEFORE/OVERLAP containers also admit
                // BEFORE and OVERLAP events.
                EventMention mention = ((EventMention) container);
                String containerDocTimeRel = mention.getEvent().getProperties().getDocTimeRel();
                inconsistentDocTimeRels = false;
                for (String docTimeRel : docTimeRels) {
                    if (docTimeRel.equals(containerDocTimeRel)) {
                        continue;
                    }
                    if (containerDocTimeRel.equals("BEFORE/OVERLAP")
                            && (docTimeRel.equals("BEFORE") || docTimeRel.equals("OVERLAP"))) {
                        continue;
                    }
                    inconsistentDocTimeRels = true;
                    break;
                }
            } else {
                // Non-event container: a single shared DocTimeRel is consistent.
                if (docTimeRels.size() == 1) {
                    inconsistentDocTimeRels = false;
                } else if (docTimeRels.contains("BEFORE/OVERLAP")) {
                    // NOTE(review): this branch is only reached when size != 1, so the
                    // "docTimeRels.size() == 1 && ..." expression below is always false.
                    // Looks like a bug (perhaps "== 2" or a negation was intended) —
                    // confirm against the intended consistency rules before changing.
                    inconsistentDocTimeRels = docTimeRels.size() == 1
                            && (docTimeRels.contains("BEFORE") || docTimeRels.contains("OVERLAP"));
                } else {
                    inconsistentDocTimeRels = true;
                }
            }

            // if inconsistent: print events, DocTimeRels and surrounding context
            if (inconsistentDocTimeRels) {
                totalInconsistentDocTimeRels += docTimeRels.size();

                // Context window spans from the earliest to the latest offset among
                // the container and its events, padded by windowSize characters.
                List<Integer> offsets = Lists.newArrayList();
                offsets.add(container.getBegin());
                offsets.add(container.getEnd());
                for (EventMention event : containers.get(container)) {
                    offsets.add(event.getBegin());
                    offsets.add(event.getEnd());
                }
                Collections.sort(offsets);
                int begin = Math.max(offsets.get(0) - windowSize, 0);
                int end = Math.min(offsets.get(offsets.size() - 1) + windowSize, text.length());
                System.err.printf("Inconsistent DocTimeRels in %s, ...%s...\n",
                        new File(ViewUriUtil.getURI(jCas)).getName(),
                        text.substring(begin, end).replaceAll("([\r\n])[\r\n]+", "$1"));
                if (container instanceof EventMention) {
                    System.err.printf("Container: \"%s\" (docTimeRel=%s)\n", container.getCoveredText(),
                            ((EventMention) container).getEvent().getProperties().getDocTimeRel());
                } else {
                    System.err.printf("Container: \"%s\"\n", container.getCoveredText());
                }
                // Print the container's events in document order.
                Ordering<EventMention> byBegin = Ordering.natural()
                        .onResultOf(new Function<EventMention, Integer>() {
                            @Override
                            public Integer apply(@Nullable EventMention event) {
                                return event.getBegin();
                            }
                        });
                for (EventMention event : byBegin.sortedCopy(containers.get(container))) {
                    System.err.printf("* \"%s\" (docTimeRel=%s)\n", event.getCoveredText(),
                            event.getEvent().getProperties().getDocTimeRel());
                }
                System.err.println();
            }
        }
    }

    System.err.printf("Inconsistent DocTimeRels: %.1f%% (%d/%d)\n",
            100.0 * totalInconsistentDocTimeRels / totalDocTimeRels, totalInconsistentDocTimeRels,
            totalDocTimeRels);
}

From source file:it.units.malelab.ege.MappingPropertiesExperimenter.java

/**
 * Measures genotype-to-phenotype mapping properties (invalidity, redundancy,
 * locality, non-uniformity, non-synonymousity — plus their "valid only"
 * variants, which exclude genotypes that fail to map) for several
 * grammar-based mappers across a suite of benchmark problems. Results are
 * written as semicolon-separated rows: problem;mapper;genotypeSize;param;property;value.
 *
 * @param args optional; args[0] is the output file path (stdout if absent)
 * @throws IOException          if the output file cannot be created
 * @throws InterruptedException declared by the experimental API
 * @throws ExecutionException   declared by the experimental API
 */
public static void main(String[] args) throws IOException, InterruptedException, ExecutionException {
    final int n = 10000; // random genotypes sampled per mapper
    final int nDist = 10000; // cap on the number of pairwise distances collected
    //prepare problems and methods
    List<String> problems = Lists.newArrayList("bool-parity5", "bool-mopm3", "sr-keijzer6", "sr-nguyen7",
            "sr-pagie1", "sr-vladislavleva4", "other-klandscapes3", "other-klandscapes7", "other-text");
    List<String> mappers = new ArrayList<>();
    for (int gs : new int[] { 64, 128, 256, 512, 1024 }) {
        mappers.add("ge-" + gs + "-2");
        mappers.add("ge-" + gs + "-4");
        mappers.add("ge-" + gs + "-8");
        mappers.add("ge-" + gs + "-12");
        mappers.add("pige-" + gs + "-4");
        mappers.add("pige-" + gs + "-8");
        mappers.add("pige-" + gs + "-16");
        mappers.add("pige-" + gs + "-24");
        mappers.add("hge-" + gs + "-0");
        mappers.add("whge-" + gs + "-2");
        mappers.add("whge-" + gs + "-3");
        mappers.add("whge-" + gs + "-5");
    }
    mappers.add("sge-0-5");
    mappers.add("sge-0-6");
    mappers.add("sge-0-7");
    mappers.add("sge-0-8");
    // NOTE(review): clear() discards every mapper added above — only the five
    // mappers below are actually evaluated. Kept as-is to preserve behavior,
    // but the dead list-building above should be removed or made a toggle.
    mappers.clear();
    mappers.addAll(Lists.newArrayList("ge-1024-8", "pige-1024-16", "hge-1024-0", "whge-1024-3", "sge-0-6"));
    PrintStream filePrintStream = null;
    if (args.length > 0) {
        filePrintStream = new PrintStream(args[0]);
    } else {
        filePrintStream = System.out;
    }
    filePrintStream.printf("problem;mapper;genotypeSize;param;property;value%n");
    //prepare distances (cached: the same pairs recur during sampling)
    Distance<Node<String>> phenotypeDistance = new CachedDistance<>(new LeavesEdit<String>());
    Distance<Sequence> genotypeDistance = new CachedDistance<>(new Hamming());
    //iterate
    for (String problemName : problems) {
        for (String mapperName : mappers) {
            System.out.printf("%20.20s, %20.20s", problemName, mapperName);
            //build problem
            Problem<String, NumericFitness> problem = null;
            if (problemName.equals("bool-parity5")) {
                problem = new Parity(5);
            } else if (problemName.equals("bool-mopm3")) {
                problem = new MultipleOutputParallelMultiplier(3);
            } else if (problemName.equals("sr-keijzer6")) {
                problem = new HarmonicCurve();
            } else if (problemName.equals("sr-nguyen7")) {
                problem = new Nguyen7(1);
            } else if (problemName.equals("sr-pagie1")) {
                problem = new Pagie1();
            } else if (problemName.equals("sr-vladislavleva4")) {
                problem = new Vladislavleva4(1);
            } else if (problemName.equals("other-klandscapes3")) {
                problem = new KLandscapes(3);
            } else if (problemName.equals("other-klandscapes7")) {
                problem = new KLandscapes(7);
            } else if (problemName.equals("other-text")) {
                problem = new Text();
            }
            //build configuration and evolver
            // Mapper names look like "<family>-<genotypeSize>-<mainParam>";
            // split once instead of re-splitting at every use below.
            String[] mapperParts = mapperName.split("-");
            Mapper mapper = null;
            int genotypeSize = Integer.parseInt(mapperParts[1]);
            int mapperMainParam = Integer.parseInt(mapperParts[2]);
            if (mapperParts[0].equals("ge")) {
                mapper = new StandardGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperParts[0].equals("pige")) {
                mapper = new PiGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperParts[0].equals("sge")) {
                mapper = new SGEMapper<>(mapperMainParam, problem.getGrammar());
            } else if (mapperParts[0].equals("hge")) {
                mapper = new HierarchicalMapper<>(problem.getGrammar());
            } else if (mapperParts[0].equals("whge")) {
                mapper = new WeightedHierarchicalMapper<>(mapperMainParam, false, true, problem.getGrammar());
            }
            //prepare things
            Random random = new Random(1); // fixed seed for reproducibility
            Set<Sequence> genotypes = new LinkedHashSet<>(n);
            //build genotypes; SGE needs its mapper-aware factory, and its
            //effective bit size is only known after the factory exists
            if (mapperParts[0].equals("sge")) {
                SGEGenotypeFactory<String> factory = new SGEGenotypeFactory<>((SGEMapper) mapper);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
                genotypeSize = factory.getBitSize();
            } else {
                BitsGenotypeFactory factory = new BitsGenotypeFactory(genotypeSize);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
            }
            //build and fill map: phenotype -> all genotypes mapping to it;
            //mapping failures are bucketed under the empty tree
            Multimap<Node<String>, Sequence> multimap = HashMultimap.create();
            int progress = 0;
            for (Sequence genotype : genotypes) {
                Node<String> phenotype;
                try {
                    if (mapperParts[0].equals("sge")) {
                        phenotype = mapper.map((SGEGenotype<String>) genotype, new HashMap<>());
                    } else {
                        phenotype = mapper.map((BitsGenotype) genotype, new HashMap<>());
                    }
                } catch (MappingException e) {
                    phenotype = Node.EMPTY_TREE;
                }
                multimap.put(phenotype, genotype);
                progress = progress + 1;
                // one dot per 10% of work (Math.round on an int division was a no-op)
                if (progress % (n / 10) == 0) {
                    System.out.print(".");
                }
            }
            System.out.println();
            //compute intra-phenotype genotype distances (capped at nDist per phenotype)
            List<Pair<Double, Double>> allDistances = new ArrayList<>();
            List<Pair<Double, Double>> allValidDistances = new ArrayList<>();
            Multimap<Node<String>, Double> genotypeDistances = ArrayListMultimap.create();
            for (Node<String> phenotype : multimap.keySet()) {
                for (Sequence genotype1 : multimap.get(phenotype)) {
                    for (Sequence genotype2 : multimap.get(phenotype)) {
                        double gDistance = genotypeDistance.d(genotype1, genotype2);
                        genotypeDistances.put(phenotype, gDistance);
                        if (genotypeDistances.get(phenotype).size() > nDist) {
                            break;
                        }
                    }
                    if (genotypeDistances.get(phenotype).size() > nDist) {
                        break;
                    }
                }
            }
            //sample up to nDist genotype/phenotype distance pairs over shuffled entries
            List<Map.Entry<Node<String>, Sequence>> entries = new ArrayList<>(multimap.entries());
            Collections.shuffle(entries, random);
            for (Map.Entry<Node<String>, Sequence> entry1 : entries) {
                for (Map.Entry<Node<String>, Sequence> entry2 : entries) {
                    double gDistance = genotypeDistance.d(entry1.getValue(), entry2.getValue());
                    double pDistance = phenotypeDistance.d(entry1.getKey(), entry2.getKey());
                    allDistances.add(new Pair<>(gDistance, pDistance));
                    if (!Node.EMPTY_TREE.equals(entry1.getKey()) && !Node.EMPTY_TREE.equals(entry2.getKey())) {
                        allValidDistances.add(new Pair<>(gDistance, pDistance));
                    }
                    if (allDistances.size() > nDist) {
                        break;
                    }
                }
                if (allDistances.size() > nDist) {
                    break;
                }
            }
            //compute properties
            double invalidity = (double) multimap.get(Node.EMPTY_TREE).size() / (double) genotypes.size();
            double redundancy = 1 - (double) multimap.keySet().size() / (double) genotypes.size();
            double validRedundancy = redundancy;
            // containsKey is the idiomatic (and cheaper) form of keySet().contains
            if (multimap.containsKey(Node.EMPTY_TREE)) {
                validRedundancy = 1 - ((double) multimap.keySet().size() - 1d)
                        / (double) (genotypes.size() - multimap.get(Node.EMPTY_TREE).size());
            }
            double locality = Utils.pearsonCorrelation(allDistances);
            double validLocality = Utils.pearsonCorrelation(allValidDistances);
            double[] sizes = new double[multimap.keySet().size()];
            double[] meanGenotypeDistances = new double[multimap.keySet().size()];
            int invalidIndex = -1; // position of the empty-tree bucket, if any
            int c = 0;
            for (Node<String> phenotype : multimap.keySet()) {
                if (Node.EMPTY_TREE.equals(phenotype)) {
                    invalidIndex = c;
                }
                sizes[c] = multimap.get(phenotype).size();
                double[] distances = new double[genotypeDistances.get(phenotype).size()];
                int k = 0;
                for (Double distance : genotypeDistances.get(phenotype)) {
                    distances[k] = distance;
                    k = k + 1;
                }
                meanGenotypeDistances[c] = StatUtils.mean(distances);
                c = c + 1;
            }
            double nonUniformity = Math.sqrt(StatUtils.variance(sizes)) / StatUtils.mean(sizes);
            double nonSynonymousity = StatUtils.mean(meanGenotypeDistances)
                    / StatUtils.mean(firsts(allDistances));
            double validNonUniformity = nonUniformity;
            double validNonSynonymousity = nonSynonymousity;
            if (invalidIndex != -1) {
                // drop the empty-tree bucket before recomputing the "valid" stats
                double[] validSizes = new double[multimap.keySet().size() - 1];
                double[] validMeanGenotypeDistances = new double[multimap.keySet().size() - 1];
                if (invalidIndex > 0) {
                    System.arraycopy(sizes, 0, validSizes, 0, invalidIndex);
                    System.arraycopy(meanGenotypeDistances, 0, validMeanGenotypeDistances, 0, invalidIndex);
                }
                System.arraycopy(sizes, invalidIndex + 1, validSizes, invalidIndex,
                        sizes.length - invalidIndex - 1);
                System.arraycopy(meanGenotypeDistances, invalidIndex + 1, validMeanGenotypeDistances,
                        invalidIndex, meanGenotypeDistances.length - invalidIndex - 1);
                validNonUniformity = Math.sqrt(StatUtils.variance(validSizes)) / StatUtils.mean(validSizes);
                validNonSynonymousity = StatUtils.mean(validMeanGenotypeDistances)
                        / StatUtils.mean(firsts(allValidDistances));
            }
            //write one row per property
            filePrintStream.printf("%s;%s;%d;%d;invalidity;%f %n", problemName, mapperParts[0],
                    genotypeSize, mapperMainParam, invalidity);
            filePrintStream.printf("%s;%s;%d;%d;redundancy;%f %n", problemName, mapperParts[0],
                    genotypeSize, mapperMainParam, redundancy);
            filePrintStream.printf("%s;%s;%d;%d;validRedundancy;%f %n", problemName, mapperParts[0],
                    genotypeSize, mapperMainParam, validRedundancy);
            filePrintStream.printf("%s;%s;%d;%d;locality;%f %n", problemName, mapperParts[0],
                    genotypeSize, mapperMainParam, locality);
            // property label fixed: was misspelled "validLLocality"
            filePrintStream.printf("%s;%s;%d;%d;validLocality;%f %n", problemName, mapperParts[0],
                    genotypeSize, mapperMainParam, validLocality);
            filePrintStream.printf("%s;%s;%d;%d;nonUniformity;%f %n", problemName, mapperParts[0],
                    genotypeSize, mapperMainParam, nonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;validNonUniformity;%f %n", problemName,
                    mapperParts[0], genotypeSize, mapperMainParam, validNonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;nonSynonymousity;%f %n", problemName, mapperParts[0],
                    genotypeSize, mapperMainParam, nonSynonymousity);
            filePrintStream.printf("%s;%s;%d;%d;validNonSynonymousity;%f %n", problemName,
                    mapperParts[0], genotypeSize, mapperMainParam, validNonSynonymousity);
        }
    }
    // only close streams we opened ourselves; closing System.out would
    // silence any later stdout output in this JVM
    if (filePrintStream != null && filePrintStream != System.out) {
        filePrintStream.close();
    }
}

From source file: com.cloudbees.api.Main.java

/**
 * Audits the CloudBees databases listed in the classpath resource
 * {@code databases.txt}: for each database it fetches metadata through the
 * Bees API, records the owning account, verifies that a JDBC connection can
 * be opened, and finally prints the databases grouped by owner.
 *
 * <p>API credentials are read from {@code ~/.bees/bees.config}.
 *
 * @param args ignored
 * @throws Exception on unrecoverable setup failure (missing credentials file,
 *         missing JDBC driver, unreadable databases list)
 */
public static void main(String[] args) throws Exception {

    // Debug toggles for optional per-database API calls; both are disabled.
    // (Replaces the original "if (true == false)" dead-code blocks.)
    final boolean runHibernateCheck = false;
    final boolean runActivate = false;

    File beesCredentialsFile = new File(System.getProperty("user.home"), ".bees/bees.config");
    Preconditions.checkArgument(beesCredentialsFile.exists(), "File %s not found", beesCredentialsFile);
    Properties beesCredentials = new Properties();
    // try-with-resources: the original leaked this FileInputStream
    try (FileInputStream credentialsIn = new FileInputStream(beesCredentialsFile)) {
        beesCredentials.load(credentialsIn);
    }
    String apiUrl = "https://api.cloudbees.com/api";
    String apiKey = beesCredentials.getProperty("bees.api.key");
    String secret = beesCredentials.getProperty("bees.api.secret");
    BeesClient client = new BeesClient(apiUrl, apiKey, secret, "xml", "1.0");
    client.setVerbose(false);

    URL databasesUrl = Thread.currentThread().getContextClassLoader().getResource("databases.txt");
    Preconditions.checkNotNull(databasesUrl, "File 'databases.txt' NOT found in the classpath");

    Collection<String> databaseNames;
    try {
        databaseNames = Sets.newTreeSet(Resources.readLines(databasesUrl, Charsets.ISO_8859_1));
    } catch (Exception e) {
        // NOTE(review): Throwables.propagate is deprecated in recent Guava;
        // kept for behavior compatibility (rethrows RuntimeException as-is).
        throw Throwables.propagate(e);
    }

    // Each useful line looks like: {host_db_create,<<"tco_q5rm">>,<<"TCO_q5rm">>,
    // Extract the first double-quoted token; comments/malformed lines map to null.
    databaseNames = Collections2.transform(databaseNames, new Function<String, String>() {
        @Nullable
        @Override
        public String apply(@Nullable String input) {
            if (input == null)
                return null;

            if (input.startsWith("#"))
                return null;

            if (input.indexOf('"') == -1) {
                logger.warn("Skip invalid line {}", input);
                return null;
            }
            input = input.substring(input.indexOf('"') + 1);
            if (input.indexOf('"') == -1) {
                logger.warn("Skip invalid line {}", input);
                return null;
            }
            return input.substring(0, input.indexOf('"'));

        }
    });
    // Drop the nulls/empties produced by the transform above.
    databaseNames = Collections2.filter(databaseNames, new Predicate<String>() {
        @Override
        public boolean apply(@Nullable String s) {
            return !Strings.isNullOrEmpty(s);
        }
    });

    Multimap<String, String> databasesByAccount = ArrayListMultimap.create();

    Class.forName("com.mysql.jdbc.Driver");

    for (String databaseName : databaseNames) {
        try {
            DatabaseInfo databaseInfo = client.databaseInfo(databaseName, true);
            databasesByAccount.put(databaseInfo.getOwner(), databaseInfo.getName());
            logger.debug("Evaluate " + databaseInfo.getName());

            if (runHibernateCheck) {
                // Hibernate (suspend) status check
                logger.info("Hibernate {}", databaseName);
                Map<String, String> params = new HashMap<String, String>();
                params.put("database_id", databaseName);
                String url = client.getRequestURL("database.hibernate", params);
                String response = client.executeRequest(url);
                DatabaseInfoResponse apiResponse = (DatabaseInfoResponse) client.readResponse(response);
                logger.info("DB {} status: {}", apiResponse.getDatabaseInfo().getName(),
                        apiResponse.getDatabaseInfo().getStatus());

            }
            if (runActivate) {
                // Activate (the original comment said "Hibernate", but this calls database.activate)
                logger.info("Activate {}", databaseName);
                Map<String, String> params = new HashMap<String, String>();
                params.put("database_id", databaseName);
                String url = client.getRequestURL("database.activate", params);
                String response = client.executeRequest(url);
                DatabaseInfoResponse apiResponse = (DatabaseInfoResponse) client.readResponse(response);
                logger.info("DB {} status: {}", apiResponse.getDatabaseInfo().getName(),
                        apiResponse.getDatabaseInfo().getStatus());
            }

            String dbUrl = "jdbc:mysql://" + databaseInfo.getMaster() + "/" + databaseInfo.getName();
            logger.info("Connect to {} user={}", dbUrl, databaseInfo.getUsername());
            // try-with-resources guarantees the connection is closed even if
            // setAutoCommit throws; the original leaked it in that case
            try (Connection cnn = DriverManager.getConnection(dbUrl, databaseInfo.getUsername(),
                    databaseInfo.getPassword())) {
                cnn.setAutoCommit(false);
            }

        } catch (Exception e) {
            // best-effort audit: log and continue with the next database
            logger.warn("Exception for {}", databaseName, e);
        }
    }

    System.out.println("OWNERS");
    for (String account : databasesByAccount.keySet()) {
        System.out.println(account + ": " + Joiner.on(", ").join(databasesByAccount.get(account)));
    }

}

From source file: org.terasology.documentation.BindingScraper.java

/**
 * @param args (ignored)/*from www .j av  a2  s .  c  om*/
 * @throws Exception if the module environment cannot be loaded
 */
public static void main(String[] args) throws Exception {
    ModuleManager moduleManager = ModuleManagerFactory.create();

    // Holds normal input mappings where there is only one key
    Multimap<InputCategory, String> categories = ArrayListMultimap.create();
    Multimap<String, Input> keys = ArrayListMultimap.create();
    Map<String, String> desc = new HashMap<>();

    for (Class<?> holdingType : moduleManager.getEnvironment().getTypesAnnotatedWith(InputCategory.class)) {
        InputCategory inputCategory = holdingType.getAnnotation(InputCategory.class);
        categories.put(inputCategory, null);
        for (String button : inputCategory.ordering()) {
            categories.put(inputCategory, button);
        }
    }

    for (Class<?> buttonEvent : moduleManager.getEnvironment()
            .getTypesAnnotatedWith(RegisterBindButton.class)) {
        DefaultBinding defBinding = buttonEvent.getAnnotation(DefaultBinding.class);
        RegisterBindButton info = buttonEvent.getAnnotation(RegisterBindButton.class);

        String cat = info.category();
        String id = "engine:" + info.id();
        desc.put(id, info.description());

        if (cat.isEmpty()) {
            InputCategory inputCategory = findEntry(categories, id);
            if (inputCategory == null) {
                System.out.println("Invalid category for: " + info.id());
            }
        } else {
            InputCategory inputCategory = findCategory(categories, cat);
            if (inputCategory != null) {
                categories.put(inputCategory, id);
            } else {
                System.out.println("Invalid category for: " + info.id());
            }
        }

        if (defBinding != null) {
            // This handles bindings with just one key
            Input input = defBinding.type().getInput(defBinding.id());
            keys.put(id, input);
        } else {
            // See if there is a multi-mapping for this button
            DefaultBindings multiBinding = buttonEvent.getAnnotation(DefaultBindings.class);

            // Annotation math magic. We're expecting a DefaultBindings containing one DefaultBinding pair
            if (multiBinding != null && multiBinding.value().length == 2) {
                DefaultBinding[] bindings = multiBinding.value();
                Input primary = bindings[0].type().getInput(bindings[0].id());
                Input secondary = bindings[1].type().getInput(bindings[1].id());
                keys.put(id, primary);
                keys.put(id, secondary);
            }
        }
    }

    for (InputCategory row : categories.keySet()) {
        System.out.println("# " + row.displayName());

        categories.get(row).stream().filter(entry -> entry != null)
                .forEach(entry -> System.out.println(desc.get(entry) + ": " + keys.get(entry)));
    }
}