Example usage for org.apache.commons.lang3.tuple Pair getRight

List of usage examples for org.apache.commons.lang3.tuple Pair getRight

Introduction

On this page you can find example usages of org.apache.commons.lang3.tuple Pair getRight.

Prototype

public abstract R getRight();

Document

Gets the right element from this pair.

When treated as a key-value pair, this is the value.

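For orientation, here is a minimal, self-contained sketch (not drawn from the projects below) illustrating that getRight() returns the value side of a pair:

import org.apache.commons.lang3.tuple.Pair;

public class PairGetRightExample {
    public static void main(String[] args) {
        // Pair.of(...) creates an immutable pair; treated as a key-value pair,
        // the left element is the key and the right element is the value.
        Pair<String, Integer> entry = Pair.of("answer", 42);

        String key = entry.getLeft();     // "answer"
        Integer value = entry.getRight(); // 42

        System.out.println(key + " = " + value);
    }
}

Because Pair implements java.util.Map.Entry, getValue() returns the same element as getRight().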
Usage

From source file:edu.umd.umiacs.clip.tools.classifier.LibSVMUtils.java

public static Pair<List<String>, List<String>> scale(Pair<List<String>, List<String>> pair) {
    Map<Integer, Pair<Float, Float>> model = learnScalingModel(pair.getLeft());
    return Pair.of(applyScalingModel(model, pair.getLeft()), applyScalingModel(model, pair.getRight()));
}

From source file:com.addthis.hydra.kafka.consumer.ConsumerUtils.java

public static Pair<ConsumerConnector, KafkaStream<Bundle, Bundle>> newBundleConsumer(String zookeeper,
        String topic, HashMap<String, String> overrides) {
    Map<String, Integer> topicStreams = new HashMap<>();
    topicStreams.put(topic, 1);
    Pair<ConsumerConnector, Map<String, List<KafkaStream<Bundle, Bundle>>>> connectorAndStreams = newBundleStreams(
            zookeeper, topicStreams, overrides);
    return new ImmutablePair<>(connectorAndStreams.getLeft(), connectorAndStreams.getRight().get(topic).get(0));
}

From source file:com.deepoove.poi.resolver.TemplateResolver.java

private static void calcRunPosInParagraph(List<XWPFRun> runs, List<Pair<RunEdge, RunEdge>> pairs) {
    int size = runs.size(), pos = 0, calc = 0;
    Pair<RunEdge, RunEdge> pair = pairs.get(pos);
    RunEdge leftEdge = pair.getLeft();
    RunEdge rightEdge = pair.getRight();
    int leftInAll = leftEdge.getAllPos();
    int rightInAll = rightEdge.getAllPos();
    for (int i = 0; i < size; i++) {
        XWPFRun run = runs.get(i);
        String str = run.getText(0);
        if (null == str) {
            logger.warn("found the empty text run,may be produce bug:" + run);
            calc += run.toString().length();
            continue;
        }
        logger.debug(str);
        if (str.length() + calc < leftInAll) {
            calc += str.length();
            continue;
        }
        for (int j = 0; j < str.length(); j++) {
            if (calc + j == leftInAll) {
                leftEdge.setRunPos(i);
                leftEdge.setRunEdge(j);
                leftEdge.setText(str);
            }
            if (calc + j == rightInAll - 1) {
                rightEdge.setRunPos(i);
                rightEdge.setRunEdge(j);
                rightEdge.setText(str);

                if (pos == pairs.size() - 1)
                    break;
                pair = pairs.get(++pos);
                leftEdge = pair.getLeft();
                rightEdge = pair.getRight();
                leftInAll = leftEdge.getAllPos();
                rightInAll = rightEdge.getAllPos();
            }
        }
        calc += str.length();
    }
}

From source file:com.twitter.graphjet.bipartite.edgepool.EdgePoolConcurrentTestHelper.java

/**
 * This helper method sets up a concurrent read-write situation with a single writer and multiple
 * readers that access the same underlying edgePool, and tests for correct edge access during
 * simultaneous edge writes. This helps test read consistency during arbitrary points of
 * inserting edges. Note that the exact read-write sequence here is non-deterministic and would
 * vary depending on the machine, but the hope is that given the large number of readers the reads
 * would be done at many different points of edge insertion. The test itself checks only for
 * partial correctness (it could have false positives) so this should only be used as a supplement
 * to other testing.
 *
 * @param edgePool           is the underlying
 *                           {@link com.twitter.graphjet.bipartite.edgepool.EdgePool}
 * @param numReadersPerNode  is the number of reader threads to use per node
 * @param leftSize           is the number of left nodes
 * @param rightSize          is the number of right nodes
 * @param edgeProbability    is the probability of an edge between a left-right node pair
 * @param random             is the random number generator to use for generating a random graph
 */
public static void testRandomConcurrentReadWriteThreads(EdgePool edgePool, int numReadersPerNode, int leftSize,
        int rightSize, double edgeProbability, Random random) {
    int maxWaitingTimeForThreads = 20; // in milliseconds
    int numReaders = leftSize * numReadersPerNode;
    CountDownLatch readersDoneLatch = new CountDownLatch(numReaders);
    // First, construct a random set of edges to insert in the graph
    Set<Pair<Integer, Integer>> edges = Sets
            .newHashSetWithExpectedSize((int) (leftSize * rightSize * edgeProbability));
    List<EdgePoolReader> readers = Lists.newArrayListWithCapacity(numReaders);
    Int2ObjectMap<IntSet> leftSideGraph = new Int2ObjectOpenHashMap<IntSet>(leftSize);
    int averageLeftDegree = (int) (rightSize * edgeProbability);
    for (int i = 0; i < leftSize; i++) {
        IntSet nodeEdges = new IntOpenHashSet(averageLeftDegree);
        for (int j = 0; j < rightSize; j++) {
            if (random.nextDouble() < edgeProbability) {
                nodeEdges.add(j);
                edges.add(Pair.of(i, j));
            }
        }
        leftSideGraph.put(i, nodeEdges);
    }

    // Create a bunch of leftReaders per node that'll read from the graph at random
    for (int i = 0; i < leftSize; i++) {
        for (int j = 0; j < numReadersPerNode; j++) {
            readers.add(new EdgePoolReader(edgePool, new CountDownLatch(0), readersDoneLatch, i,
                    random.nextInt(maxWaitingTimeForThreads)));
        }
    }

    // Create a single writer that will insert these edges in random order
    List<WriterInfo> writerInfo = Lists.newArrayListWithCapacity(edges.size());
    List<Pair<Integer, Integer>> edgesList = Lists.newArrayList(edges);
    Collections.shuffle(edgesList);
    CountDownLatch writerDoneLatch = new CountDownLatch(edgesList.size());
    for (Pair<Integer, Integer> edge : edgesList) {
        writerInfo.add(new WriterInfo(edge.getLeft(), edge.getRight(), new CountDownLatch(0), writerDoneLatch));
    }

    ExecutorService executor = Executors.newFixedThreadPool(numReaders + 1); // single writer
    List<Callable<Integer>> allThreads = Lists.newArrayListWithCapacity(numReaders + 1);
    // First, we add the writer
    allThreads.add(Executors.callable(new EdgePoolWriter(edgePool, writerInfo), 1));
    // then the readers
    for (int i = 0; i < numReaders; i++) {
        allThreads.add(Executors.callable(readers.get(i), 1));
    }
    // these will execute in some non-deterministic order
    Collections.shuffle(allThreads, random);

    // Wait for all the processes to finish
    try {
        List<Future<Integer>> results = executor.invokeAll(allThreads, 10, TimeUnit.SECONDS);
        for (Future<Integer> result : results) {
            assertTrue(result.isDone());
            assertEquals(1, result.get().intValue());
        }
    } catch (InterruptedException e) {
        throw new RuntimeException("Execution for a thread was interrupted: ", e);
    } catch (ExecutionException e) {
        throw new RuntimeException("Execution issue in an executor thread: ", e);
    }

    // confirm that these worked as expected
    try {
        readersDoneLatch.await();
        writerDoneLatch.await();
    } catch (InterruptedException e) {
        throw new RuntimeException("Execution for last reader was interrupted: ", e);
    }

    // Check that all readers' read info is consistent with the graph
    for (EdgePoolReader reader : readers) {
        IntSet expectedEdges = leftSideGraph.get(reader.queryNode);
        assertTrue(reader.getQueryNodeDegree() <= expectedEdges.size());
        if (reader.getQueryNodeDegree() == 0) {
            assertNull(reader.getQueryNodeEdges());
        } else {
            for (int edge : reader.getQueryNodeEdges()) {
                assertTrue(expectedEdges.contains(edge));
            }
        }
    }
}

From source file:com.splicemachine.db.impl.ast.PlanPrinter.java

public static String treeToString(Map<String, Object> nodeInfo) throws StandardException {
    List<Pair<Integer, Map>> subs = new LinkedList<>();
    StringBuilder sb = new StringBuilder();
    List<Map<String, Object>> nodes = linearizeNodeInfoTree(nodeInfo);
    for (Map<String, Object> node : nodes) {
        List<Map> subqs = (List<Map>) node.get("subqueries");
        if (subqs != null) {
            for (Map subInfo : subqs) {
                subs.add(Pair.of((Integer) node.get("n"), subInfo));
            }
        }
        sb.append(infoToString(node, true));
        sb.append("\n");
    }
    for (Pair<Integer, Map> sub : subs) {
        Map subInfo = sub.getRight();
        Map<String, Object> subqInfoNode = (Map<String, Object>) subInfo.get("node");
        sb.append(subqueryToString(subInfo, subqInfoNode));
        sb.append(treeToString(subqInfoNode));
    }
    return sb.toString();
}

From source file:com.deepoove.poi.resolver.TemplateResolver.java

/**
 * Run string algorithm: locates template tag edges within the paragraph's runs,
 * then splits and merges runs so that each tag occupies a run of its own.
 *
 * @param paragraph the paragraph to parse for template tags
 * @return the run templates found, or null if the paragraph has no runs
 */
public static List<RunTemplate> parseRun(XWPFParagraph paragraph) {
    List<XWPFRun> runs = paragraph.getRuns();
    if (null == runs || runs.isEmpty())
        return null;
    String text = paragraph.getText();
    logger.debug("Paragrah's text is:" + text);
    List<Pair<RunEdge, RunEdge>> pairs = new ArrayList<Pair<RunEdge, RunEdge>>();
    List<String> tags = new ArrayList<String>();
    calcTagPosInParagraph(text, pairs, tags);

    List<RunTemplate> rts = new ArrayList<RunTemplate>();
    if (pairs.isEmpty())
        return rts;
    RunTemplate runTemplate;
    calcRunPosInParagraph(runs, pairs);
    for (Pair<RunEdge, RunEdge> pai : pairs) {
        logger.debug(pai.getLeft().toString());
        logger.debug(pai.getRight().toString());
    }
    // split and merge
    Pair<RunEdge, RunEdge> pair2 = pairs.get(0);
    int length = pairs.size();
    int tagIndex = length;
    for (int n = length - 1; n >= 0; n--) {
        pair2 = pairs.get(n);
        RunEdge left2 = pair2.getLeft();
        RunEdge right2 = pair2.getRight();
        int left_r = left2.getRunPos();
        int right_r = right2.getRunPos();
        int runEdge = left2.getRunEdge();
        int runEdge2 = right2.getRunEdge();
        String text1 = runs.get(left_r).getText(0);
        String text2 = runs.get(right_r).getText(0);
        if (runEdge2 + 1 >= text2.length()) {
            if (left_r != right_r)
                paragraph.removeRun(right_r);
        } else {
            String substring = text2.substring(runEdge2 + 1, text2.length());
            if (left_r == right_r) {
                XWPFRun insertNewRun = paragraph.insertNewRun(right_r + 1);
                styleRun(insertNewRun, runs.get(right_r));
                insertNewRun.setText(substring, 0);
            } else
                runs.get(right_r).setText(substring, 0);
        }
        for (int m = right_r - 1; m > left_r; m--) {
            paragraph.removeRun(m);
        }
        if (runEdge <= 0) {
            runs.get(left_r).setText(tags.get(--tagIndex), 0);
            runTemplate = parseRun(runs.get(left_r));
        } else {
            String substring = text1.substring(0, runEdge);
            XWPFRun xwpfRun = runs.get(left_r);
            runs.get(left_r).setText(substring, 0);
            XWPFRun insertNewRun = paragraph.insertNewRun(left_r + 1);
            styleRun(insertNewRun, xwpfRun);
            insertNewRun.setText(tags.get(--tagIndex), 0);
            runTemplate = parseRun(runs.get(left_r + 1));
        }

        if (null != runTemplate) {
            rts.add(runTemplate);
        }
    }
    return rts;
}

From source file:com.acmutv.ontoqa.core.parser.SimpleSltagParser.java

private static void processAdjunctions(ParserState dashboard) throws LTAGException {
    List<String> words = dashboard.getWords();
    Sltag curr = dashboard.getCurr();
    Map<Integer, Triple<Variable, Variable, Set<Statement>>> missedMainVariables = dashboard
            .getMissedMainVariables();

    Iterator<Pair<Sltag, Integer>> waitingAdjunctions = dashboard.getAdjunctions().iterator();
    while (waitingAdjunctions.hasNext()) {
        Pair<Sltag, Integer> entry = waitingAdjunctions.next();
        Sltag toAdjunct = entry.getLeft();
        Integer start = entry.getRight();
        String startLexicalEntry = (start != null) ? words.get(start) : null;
        LtagNode localTarget = curr.firstMatch(toAdjunct.getRoot().getCategory(), startLexicalEntry, null);
        if (localTarget != null) { /* CAN MAKE ADJUNCTION */
            if (curr.getSemantics().getMainVariable() == null && toAdjunct.isLeftAdj()
                    && missedMainVariables.containsKey(start)) { /* INSPECT MAIN VARIABLE MISS */
                int lookup = (start != null) ? start : 0;
                Variable missedMainVar = missedMainVariables.get(lookup).getMiddle();
                LOGGER.warn("Found possible main variable miss at pos {}: {}", lookup, missedMainVar);
                curr.getSemantics().setMainVariable(missedMainVar);
                LOGGER.warn("Main variable temporarily set to: {}", missedMainVar);
                curr.adjunction(toAdjunct, localTarget);
                curr.getSemantics().setMainVariable(null);
                LOGGER.warn("Resetting main variable to NULL");
            } else if (curr.getSemantics().getMainVariable() == null && toAdjunct.isRightAdj()
                    && missedMainVariables.containsKey((start != null) ? start + 2 : 1)) {
                int lookup = (start != null) ? start + 2 : 1;
                Variable missedMainVar = missedMainVariables.get(lookup).getMiddle();
                LOGGER.warn("Found possible main variable miss at pos {}: {}", lookup, missedMainVar);
                curr.getSemantics().setMainVariable(missedMainVar);
                LOGGER.warn("Main variable temporarily set to: {}", missedMainVar);
                curr.adjunction(toAdjunct, localTarget);
                curr.getSemantics().setMainVariable(null);
                LOGGER.warn("Resetting main variable to NULL");
            } else {
                curr.adjunction(toAdjunct, localTarget);
            }
            LOGGER.debug("Adjuncted {} on {}", toAdjunct.toPrettyString(), localTarget);
            waitingAdjunctions.remove();
        }
    }
}

From source file:com.etsy.arbiter.workflow.WorkflowGraphBuilder.java

/**
 * Processes all connected subcomponents of a given graph
 *
 * @param parentGraph The graph for which to process subcomponents
 * @return A Triple with these elements - A new graph with fork/join pairs inserted, the "first" node in this graph, and the "last" node in this graph
 * @throws WorkflowGraphException
 * @throws DirectedAcyclicGraph.CycleFoundException
 */
private static Triple<DirectedAcyclicGraph<Action, DefaultEdge>, Action, Action> processSubcomponents(
        DirectedAcyclicGraph<Action, DefaultEdge> parentGraph)
        throws WorkflowGraphException, DirectedAcyclicGraph.CycleFoundException {
    ConnectivityInspector<Action, DefaultEdge> inspector = new ConnectivityInspector<>(parentGraph);
    List<Set<Action>> connectedComponents = inspector.connectedSets();

    // Recursively process each connected subcomponent of the graph
    List<DirectedAcyclicGraph<Action, DefaultEdge>> componentGraphs = new ArrayList<>(
            connectedComponents.size());
    for (Set<Action> subComponent : connectedComponents) {
        componentGraphs.add(buildComponentGraph(subComponent, parentGraph));
    }

    DirectedAcyclicGraph<Action, DefaultEdge> result = new DirectedAcyclicGraph<>(DefaultEdge.class);
    for (DirectedAcyclicGraph<Action, DefaultEdge> subSubgraph : componentGraphs) {
        Graphs.addGraph(result, subSubgraph);
    }

    // If we have more than one subcomponent, we must insert a fork/join to run them in parallel
    if (componentGraphs.size() > 1) {
        Pair<Action, Action> forkJoin = addForkJoin(result);
        Action fork = forkJoin.getLeft();
        Action join = forkJoin.getRight();
        for (DirectedAcyclicGraph<Action, DefaultEdge> subSubgraph : componentGraphs) {
            for (Action vertex : subSubgraph.vertexSet()) {
                // Vertices with no incoming edges attach directly to the fork
                if (subSubgraph.inDegreeOf(vertex) == 0) {
                    result.addDagEdge(fork, vertex);
                }
                // Vertices with no outgoing edges attach directly to the join
                if (subSubgraph.outDegreeOf(vertex) == 0) {
                    result.addDagEdge(vertex, join);
                }
            }
        }
    }

    // The graph will now have one node with no outgoing edges and one node with no incoming edges
    // The node with no outgoing edges is the "last" node in the resulting graph
    // The node with no incoming edges is the "first" node in the resulting graph
    // These are pulled out specifically to make it easier to attach the resulting graph into another one
    Action noOutgoing = null;
    Action noIncoming = null;

    for (Action vertex : result.vertexSet()) {
        if (noIncoming == null && result.inDegreeOf(vertex) == 0) {
            noIncoming = vertex;
        }
    }

    for (Action vertex : result.vertexSet()) {
        if (noOutgoing == null && result.outDegreeOf(vertex) == 0) {
            noOutgoing = vertex;
        }
    }

    return Triple.of(result, noIncoming, noOutgoing);
}

From source file:com.etsy.arbiter.workflow.WorkflowGraphBuilder.java

/**
 * Recursively insert fork/joins for connected subcomponents of a graph
 *
 * @param vertices The set of vertices to process
 * @param parentGraph The parentGraph graph of these vertices
 * @return DirectedAcyclicGraph A new graph containing all the given vertices with appropriate fork/join pairs inserted
 * @throws WorkflowGraphException
 * @throws DirectedAcyclicGraph.CycleFoundException
 */
private static DirectedAcyclicGraph<Action, DefaultEdge> buildComponentGraph(Set<Action> vertices,
        DirectedAcyclicGraph<Action, DefaultEdge> parentGraph)
        throws WorkflowGraphException, DirectedAcyclicGraph.CycleFoundException {
    DirectedAcyclicGraph<Action, DefaultEdge> subgraph = buildSubgraph(parentGraph, vertices);

    // Start by pulling out the vertices with no incoming edges
    // These can run in parallel in a fork-join
    Set<Action> initialNodes = new HashSet<>();
    for (Action vertex : subgraph.vertexSet()) {
        if (subgraph.inDegreeOf(vertex) == 0) {
            initialNodes.add(vertex);
        }
    }

    DirectedAcyclicGraph<Action, DefaultEdge> result = new DirectedAcyclicGraph<>(DefaultEdge.class);

    if (initialNodes.isEmpty()) {
        // This is a very odd case, but just in case we'll fail if it happens
        throw new WorkflowGraphException("No nodes with inDegree = 0 found.  This shouldn't happen.");
    } else if (initialNodes.size() == 1) {
        // If there is only one node, we can't put it in a fork/join
        // In this case we'll add just that vertex to the resulting graph
        Action vertex = initialNodes.iterator().next();
        result.addVertex(vertex);
        // Remove the processed vertex so that we have new unprocessed subcomponents
        subgraph.removeVertex(vertex);
    } else {
        // If there are multiple nodes, insert a fork/join pair to run them in parallel
        Pair<Action, Action> forkJoin = addForkJoin(result);
        Action fork = forkJoin.getLeft();
        Action join = forkJoin.getRight();
        for (Action vertex : initialNodes) {
            result.addVertex(vertex);
            result.addDagEdge(fork, vertex);
            result.addDagEdge(vertex, join);
            // Remove the processed vertex so that we have new unprocessed subcomponents
            subgraph.removeVertex(vertex);
        }
    }

    // Now recursively process the graph with the processed nodes removed
    Triple<DirectedAcyclicGraph<Action, DefaultEdge>, Action, Action> subComponentGraphTriple = processSubcomponents(
            subgraph);
    DirectedAcyclicGraph<Action, DefaultEdge> subComponentGraph = subComponentGraphTriple.getLeft();

    // Having processed the subcomponents, we attach the "last" node of the graph created here to
    // the "first" node of the subcomponent graph
    Action noIncoming = subComponentGraphTriple.getMiddle();
    Action noOutgoing = null;

    for (Action vertex : result.vertexSet()) {
        if (noOutgoing == null && result.outDegreeOf(vertex) == 0) {
            noOutgoing = vertex;
        }
    }

    Graphs.addGraph(result, subComponentGraph);
    if (noOutgoing != null && noIncoming != null && !noOutgoing.equals(noIncoming)) {
        result.addDagEdge(noOutgoing, noIncoming);
    }
    return result;
}

From source file:de.tntinteractive.portalsammler.engine.SecureStore.java

private static void readIndex(final StorageLayer storage, final SecureRandom srand, final byte[] key,
        final SecureStore ret) throws IOException {
    final InputStream stream = storage.openInputStream("index");
    try {
        final Pair<Integer, MapReader> saltAndReader = createMapReader(stream, srand, key);
        ret.indexSalt = saltAndReader.getLeft();
        final MapReader r = saltAndReader.getRight();

        Pair<String, Map<String, String>> p;
        while ((p = r.readNext()) != null) {
            final DocumentInfo di = DocumentInfo.parse(p.getLeft());
            ret.index.putDocument(di, p.getRight());
        }

        r.close();
    } finally {
        stream.close();
    }
}