Example usage for org.apache.commons.lang3.tuple Pair getKey

List of usage examples for org.apache.commons.lang3.tuple Pair getKey

Introduction

On this page you can find example usage for org.apache.commons.lang3.tuple Pair getKey.

Prototype

@Override
public final L getKey() 

Source Link

Document

Gets the key from this pair.

This method implements the Map.Entry interface returning the left element as the key.

Usage

From source file:hu.ppke.itk.nlpg.purepos.model.internal.NGramModel.java

/**
 * Depth-first traversal of the n-gram trie. The path from the root to the
 * current node is maintained in {@code acc}; at every leaf the lambda weight
 * at the winning context length is incremented for each word stored there.
 *
 * @param node the trie node currently being visited
 * @param acc  the path of nodes from the root down to (and including) {@code node}
 */
protected void iterate(TrieNode<Integer, Integer, W> node, ArrayList<TrieNode<Integer, Integer, W>> acc) {
    acc.add(node);
    final boolean isLeaf = node.getChildNodes() == null || node.getChildNodes().isEmpty();
    if (isLeaf) {
        for (W word : node.getWords().keySet()) {
            final Pair<Integer, Double> best = findMax(acc, word);
            // Lambda indices are shifted by one relative to context depth.
            final int lambdaIndex = best.getKey() + 1;
            if (best.getValue() != -1) {
                lambdas.set(lambdaIndex, lambdas.get(lambdaIndex) + node.getWord(word));
            }
        }
    } else {
        for (TrieNode<Integer, Integer, W> child : node.getChildNodes().values()) {
            iterate(child, acc);
        }
    }
    // Pop this node so the accumulator again reflects the parent's path.
    acc.remove(acc.size() - 1);
}

From source file:com.acmutv.ontoqa.benchmark.extra.QuestionE03Test.java

/**
 * Tests the question-answering with parsing.
 * @throws QuestionException when the question is malformed.
 * @throws OntoqaFatalException when the question cannot be processed due to some fatal errors.
 */
@Test
public void test_nlp_wired() throws Exception {
    final Grammar grammar = Common.getGrammar();
    final Ontology ontology = Common.getOntology();
    final Pair<Query, Answer> result = CoreController.process(QUESTION, grammar, ontology);
    // getLeft()/getRight() are equivalent to getKey()/getValue() on a Pair.
    final Query query = result.getLeft();
    final Answer answer = result.getRight();
    LOGGER.info("Query: {}", query);
    LOGGER.info("Answer: {}", answer);
    final String rendered = query.toString();
    Assert.assertTrue(QUERY_2.equals(rendered) || QUERY_2_bis.equals(rendered));
    Assert.assertEquals(ANSWER, answer);
}

From source file:com.ottogroup.bi.streaming.operator.json.insert.JsonStaticContentInsertion.java

/**
 * Initializes the operator with the (content reference, value) pairs to insert.
 * An empty list is permitted and "configures" the operator to work as a simple
 * pass-through operator; a {@code null} list, {@code null} elements, missing
 * content references/paths, or {@code null} insertion values are rejected.
 *
 * @param values pairs of JSON content references and the static values to insert
 * @throws IllegalArgumentException if the list or any of its contents is invalid
 */
public JsonStaticContentInsertion(final List<Pair<JsonContentReference, Serializable>> values)
        throws IllegalArgumentException {

    ///////////////////////////////////////////////////////
    // validate input (note: only null is rejected here — an empty
    // list is a valid pass-through configuration)
    if (values == null)
        throw new IllegalArgumentException("Missing required input");
    ///////////////////////////////////////////////////////

    for (final Pair<JsonContentReference, Serializable> v : values) {
        if (v == null)
            throw new IllegalArgumentException("Empty list elements are not permitted");
        // Use getLeft()/getRight() consistently (the original mixed getLeft()
        // and its alias getKey() in the same expression).
        if (v.getLeft() == null || v.getLeft().getPath() == null || v.getLeft().getPath().length < 1)
            throw new IllegalArgumentException("Empty content referenced are not permitted");
        if (v.getRight() == null)
            throw new IllegalArgumentException("Null is not permitted as insertion value");
        this.values.add(v);
    }
}

From source file:com.yahoo.pulsar.broker.loadbalance.impl.DeviationShedder.java

/**
 * Recommend that all of the returned bundles be unloaded based on observing excessive standard deviations according
 * to some metric./*  w w  w.j av a  2s  .  c  om*/
 * 
 * @param loadData
 *            The load data to used to make the unloading decision.
 * @param conf
 *            The service configuration.
 * @return A map from all selected bundles to the brokers on which they reside.
 */
@Override
public Map<String, String> findBundlesForUnloading(final LoadData loadData, final ServiceConfiguration conf) {
    final Map<String, String> result = new HashMap<>();
    bundleTreeSetCache.clear();
    metricTreeSetCache.clear();
    double sum = 0;
    double squareSum = 0;
    final Map<String, BrokerData> brokerDataMap = loadData.getBrokerData();

    // Treating each broker as a data point, calculate the sum and squared
    // sum of the evaluated broker metrics.
    // These may be used to calculate the standard deviation.
    for (Map.Entry<String, BrokerData> entry : brokerDataMap.entrySet()) {
        final double value = brokerValue(entry.getValue(), conf);
        sum += value;
        squareSum += value * value;
        metricTreeSetCache.add(new ImmutablePair<>(value, entry.getKey()));
    }
    // Mean cannot change by just moving around bundles.
    final double mean = sum / brokerDataMap.size();
    double standardDeviation = Math.sqrt(squareSum / brokerDataMap.size() - mean * mean);
    final double deviationThreshold = getDeviationThreshold(conf);
    String lastMostOverloaded = null;
    // While the most loaded broker is above the standard deviation
    // threshold, continue to move bundles.
    while ((metricTreeSetCache.last().getKey() - mean) / standardDeviation > deviationThreshold) {
        final Pair<Double, String> mostLoadedPair = metricTreeSetCache.last();
        final double highestValue = mostLoadedPair.getKey();
        final String mostLoaded = mostLoadedPair.getValue();

        final Pair<Double, String> leastLoadedPair = metricTreeSetCache.first();
        final double leastValue = leastLoadedPair.getKey();
        final String leastLoaded = metricTreeSetCache.first().getValue();

        if (!mostLoaded.equals(lastMostOverloaded)) {
            // Reset the bundle tree set now that a different broker is
            // being considered.
            bundleTreeSetCache.clear();
            for (String bundle : brokerDataMap.get(mostLoaded).getLocalData().getBundles()) {
                if (!result.containsKey(bundle)) {
                    // Don't consider bundles that are already going to be
                    // moved.
                    bundleTreeSetCache.add(new ImmutablePair<>(
                            bundleValue(bundle, brokerDataMap.get(mostLoaded), conf), bundle));
                }
            }
            lastMostOverloaded = mostLoaded;
        }
        boolean selected = false;
        while (!(bundleTreeSetCache.isEmpty() || selected)) {
            Pair<Double, String> mostExpensivePair = bundleTreeSetCache.pollLast();
            double loadIncurred = mostExpensivePair.getKey();
            // When the bundle is moved, we want the now least loaded server
            // to have lower overall load than the
            // most loaded server does not. Thus, we will only consider
            // moving the bundle if this condition
            // holds, and otherwise we will try the next bundle.
            if (loadIncurred + leastValue < highestValue) {
                // Update the standard deviation and replace the old load
                // values in the broker tree set with the
                // load values assuming this move took place.
                final String bundleToMove = mostExpensivePair.getValue();
                result.put(bundleToMove, mostLoaded);
                metricTreeSetCache.remove(mostLoadedPair);
                metricTreeSetCache.remove(leastLoadedPair);
                final double newHighLoad = highestValue - loadIncurred;
                final double newLowLoad = leastValue - loadIncurred;
                squareSum -= highestValue * highestValue + leastValue * leastValue;
                squareSum += newHighLoad * newHighLoad + newLowLoad * newLowLoad;
                standardDeviation = Math.sqrt(squareSum / brokerDataMap.size() - mean * mean);
                metricTreeSetCache.add(new ImmutablePair<>(newLowLoad, leastLoaded));
                metricTreeSetCache.add(new ImmutablePair<>(newHighLoad, mostLoaded));
                selected = true;
            }
        }
        if (!selected) {
            // Move on to the next broker if no bundle could be moved.
            metricTreeSetCache.pollLast();
        }
    }
    return result;
}

From source file:com.acmutv.ontoqa.benchmark.extra.QuestionE02Test.java

/**
 * Tests the question-answering with parsing.
 * @throws QuestionException when the question is malformed.
 * @throws OntoqaFatalException when the question cannot be processed due to some fatal errors.
 */
@Test
public void test_nlp() throws Exception {
    final Grammar grammar = Common.getGrammar();
    final Ontology ontology = Common.getOntology();
    final Pair<Query, Answer> result = CoreController.process(QUESTION, grammar, ontology);
    // getLeft()/getRight() are equivalent to getKey()/getValue() on a Pair.
    final Query query = result.getLeft();
    final Answer answer = result.getRight();
    LOGGER.info("Query: {}", query);
    LOGGER.info("Answer: {}", answer);
    final String rendered = query.toString();
    Assert.assertTrue(QUERY_1.equals(rendered) || QUERY_1_bis.equals(rendered));
    Assert.assertEquals(ANSWER, answer);
}

From source file:de.ks.binding.Binding.java

@SuppressWarnings("unchecked")
protected void applyModelToProperties(Object model) {
    // Push each model attribute into its bound UI property, running the
    // registered to-property converter (the pair's left element) first.
    properties.entrySet().forEach(entry -> {
        Object extracted = entry.getKey().getValue(model);
        @SuppressWarnings("unchecked")
        Property<Object> target = (Property<Object>) entry.getValue();
        Pair<Function, Function> conversion = converters.get(target);
        if (conversion != null && extracted != null) {
            extracted = conversion.getLeft().apply(extracted);
        }
        // Never write to an already-bound property — that would throw.
        if (!target.isBound()) {
            target.setValue(extracted);
        }
    });
}

From source file:com.galenframework.speclang2.specs.SpecImageProcessor.java

/**
 * Parses an image spec: reads the comma-separated key/value parameters and
 * populates a {@link SpecImage} accordingly.
 *
 * @param reader      the character reader positioned at the spec parameters
 * @param contextPath base path used to resolve relative image file names, may be null
 * @return the populated image spec
 * @throws SyntaxException on an unknown parameter or when no image file is defined
 */
@Override
public Spec process(StringCharReader reader, String contextPath) {
    List<Pair<String, String>> parameters = Expectations.commaSeparatedRepeatedKeyValues().read(reader);
    SpecImage spec = new SpecImage();
    spec.setImagePaths(new LinkedList<String>());
    spec.setStretch(false);
    spec.setErrorRate(GalenConfig.getConfig().getImageSpecDefaultErrorRate());
    spec.setTolerance(GalenConfig.getConfig().getImageSpecDefaultTolerance());

    for (Pair<String, String> parameter : parameters) {
        final String name = parameter.getKey();
        final String value = parameter.getValue();
        if (name == null) {
            // Preserve the original behavior: a null key falls into the
            // "unknown parameter" error path.
            throw new SyntaxException("Unknown parameter: " + name);
        }
        switch (name) {
        case "file":
            if (contextPath != null) {
                spec.getImagePaths().add(contextPath + File.separator + value);
            } else {
                spec.getImagePaths().add(value);
            }
            break;
        case "error":
            spec.setErrorRate(SpecImage.ErrorRate.fromString(value));
            break;
        case "tolerance":
            spec.setTolerance(parseIntegerParameter("tolerance", value));
            break;
        case "analyze-offset":
            spec.setAnalyzeOffset(parseIntegerParameter("analyze-offset", value));
            break;
        case "stretch":
            spec.setStretch(true);
            break;
        case "area":
            spec.setSelectedArea(parseRect(value));
            break;
        case "filter": {
            // A plain "filter" applies to both the original and the sample image.
            ImageFilter filter = parseImageFilter(value, contextPath);
            spec.getOriginalFilters().add(filter);
            spec.getSampleFilters().add(filter);
            break;
        }
        case "filter-a":
            spec.getOriginalFilters().add(parseImageFilter(value, contextPath));
            break;
        case "filter-b":
            spec.getSampleFilters().add(parseImageFilter(value, contextPath));
            break;
        case "map-filter":
            spec.getMapFilters().add(parseImageFilter(value, contextPath));
            break;
        case "crop-if-outside":
            spec.setCropIfOutside(true);
            break;
        case "ignore-objects": {
            String ignoreObjects = parseExcludeObjects(value);
            if (spec.getIgnoredObjectExpressions() == null) {
                spec.setIgnoredObjectExpressions(new LinkedList<>());
            }
            spec.getIgnoredObjectExpressions().add(ignoreObjects);
            break;
        }
        default:
            throw new SyntaxException("Unknown parameter: " + name);
        }
    }

    if (spec.getImagePaths() == null || spec.getImagePaths().isEmpty()) {
        throw new SyntaxException("There are no images defined");
    }
    return spec;
}

From source file:forge.game.GameOutcome.java

/**
 * Checks whether the given lobby player is among the winners of this game.
 *
 * @param who the player to look up (compared by reference identity, as before)
 * @return true iff some rating entry for {@code who} has a winning outcome
 */
public boolean isWinner(final LobbyPlayer who) {
    for (final Pair<LobbyPlayer, PlayerStatistics> rating : playerRating) {
        final boolean hasWon = rating.getValue().getOutcome().hasWon();
        if (hasWon && rating.getLeft() == who) {
            return true;
        }
    }
    return false;
}

From source file:com.splicemachine.derby.stream.function.merge.AbstractMergeJoinFlatMapFunction.java

/**
 * Narrows the scan over the right-hand table of a merge join using the hash-key
 * values of the first (peeked, not consumed) left-side row, and installs the
 * resulting start row, scan keys, and stop row on the activation.
 *
 * @param leftPeekingIterator iterator over the left rows; only {@code peek()} is called
 * @throws StandardException propagated from key-row extraction or row column access
 */
protected void initRightScan(PeekingIterator<LocatedRow> leftPeekingIterator) throws StandardException {
    ExecRow firstHashRow = joinOperation.getKeyRow(leftPeekingIterator.peek().getRow());
    ExecRow startPosition = joinOperation.getRightResultSet().getStartPosition();
    int[] columnOrdering = getColumnOrdering(joinOperation.getRightResultSet());
    int nCols = startPosition != null ? startPosition.nColumns() : 0;
    ExecRow scanStartOverride = null;
    int[] scanKeys = null;
    // If start row of right table scan has as many columns as key columns of the table, cannot further
    // narrow down scan space, so return right table scan start row.
    if (nCols == columnOrdering.length) {
        scanStartOverride = startPosition;
        scanKeys = columnOrdering;
    } else {
        int[] rightHashKeys = joinOperation.getRightHashKeys();
        // Find valid hash column values to narrow down right scan. The valid hash columns must:
        // 1) not be used as a start key for inner table scan
        // 2) be consecutive
        // 3) be a key column
        // Each pair collected below is (index within rightHashKeys, column position).
        LinkedList<Pair<Integer, Integer>> hashColumnIndexList = new LinkedList<>();
        for (int i = 0; i < rightHashKeys.length; ++i) {
            if (rightHashKeys[i] > nCols - 1) {
                if ((hashColumnIndexList.isEmpty()
                        || hashColumnIndexList.getLast().getValue() == rightHashKeys[i] - 1)
                        && isKeyColumn(columnOrdering, rightHashKeys[i])) {
                    hashColumnIndexList.add(new ImmutablePair<Integer, Integer>(i, rightHashKeys[i]));
                } else {
                    // Stop at the first gap or non-key column — hash columns must be consecutive.
                    break;
                }
            }
        }

        // Build the start row: the original start-key columns first, followed by
        // the selected hash-column values taken from the first left row.
        scanStartOverride = new ValueRow(nCols + hashColumnIndexList.size());
        if (startPosition != null) {
            for (int i = 1; i <= startPosition.nColumns(); ++i) {
                scanStartOverride.setColumn(i, startPosition.getColumn(i));
            }
        }
        for (int i = 0; i < hashColumnIndexList.size(); ++i) {
            Pair<Integer, Integer> hashColumnIndex = hashColumnIndexList.get(i);
            int index = hashColumnIndex.getKey();
            // ExecRow columns are 1-based, hence the +1 offsets.
            scanStartOverride.setColumn(nCols + i + 1, firstHashRow.getColumn(index + 1));
        }

        // Scan key should include columns
        // 1) preceding the first hash column, these columns are in the form of "col=constant"
        // 2) all hash columns that are key columns
        scanKeys = new int[hashColumnIndexList.size() + rightHashKeys[0]];
        for (int i = 0; i < rightHashKeys[0]; ++i) {
            scanKeys[i] = i;
        }
        for (int i = 0; i < hashColumnIndexList.size(); ++i) {
            Pair<Integer, Integer> hashColumnIndex = hashColumnIndexList.get(i);
            int colPos = hashColumnIndex.getValue();
            scanKeys[rightHashKeys[0] + i] = colPos;
        }
    }

    ((BaseActivation) joinOperation.getActivation()).setScanStartOverride(scanStartOverride);
    ((BaseActivation) joinOperation.getActivation()).setScanKeys(scanKeys);
    if (startPosition != null) {
        // NOTE(review): the original start position doubles as the stop row,
        // presumably bounding the scan to the matching key prefix — confirm.
        ((BaseActivation) joinOperation.getActivation()).setScanStopOverride(startPosition);
    }

}

From source file:com.galenframework.speclang2.pagespec.PageSectionProcessor.java

private Pair<PageRule, Map<String, String>> findAndProcessRule(String ruleText, StructNode ruleNode) {
    ListIterator<Pair<Rule, PageRule>> iterator = pageSpecHandler.getPageRules()
            .listIterator(pageSpecHandler.getPageRules().size());
    /*/* w  ww.  j  ava 2 s  .c  o m*/
    It is important to make a reversed iteration over all rules so that
    it is possible for the end user to override previously defined rules
     */

    while (iterator.hasPrevious()) {
        Pair<Rule, PageRule> rulePair = iterator.previous();
        Matcher matcher = rulePair.getKey().getPattern().matcher(ruleText);
        if (matcher.matches()) {
            int index = 1;

            Map<String, String> parameters = new HashMap<>();

            for (String parameterName : rulePair.getKey().getParameters()) {
                String value = matcher.group(index);
                pageSpecHandler.setGlobalVariable(parameterName, value, ruleNode);

                parameters.put(parameterName, value);
                index += 1;
            }

            return new ImmutablePair<>(rulePair.getValue(), parameters);
        }
    }
    throw new SyntaxException(ruleNode, "Couldn't find rule matching: " + ruleText);
}