Example usage for org.apache.commons.lang3.tuple Triple getLeft

List of usage examples for org.apache.commons.lang3.tuple Triple getLeft

Introduction

On this page you can find example usage for org.apache.commons.lang3.tuple Triple getLeft.

Prototype

public abstract L getLeft();

Document

Gets the left element from this triple.

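As a minimal, self-contained sketch (not taken from any of the projects listed below), getLeft() is typically used together with Triple.of and the companion accessors getMiddle() and getRight():

import org.apache.commons.lang3.tuple.Triple;

public class TripleGetLeftExample {
    public static void main(String[] args) {
        // Triple.of creates an immutable triple; getLeft() returns its first element.
        Triple<String, Integer, Double> triple = Triple.of("id-42", 7, 3.14);
        String left = triple.getLeft();      // "id-42"
        Integer middle = triple.getMiddle(); // 7
        Double right = triple.getRight();    // 3.14
        System.out.println(left + "\t" + middle + "\t" + right);
    }
}
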
Usage

From source file:com.act.lcms.ExtractFromNetCDFAroundMass.java

public static void main(String[] args) throws Exception {
    if (args.length != 4 || !args[0].endsWith(".nc")) {
        throw new RuntimeException(
                "Needs (1) NetCDF .nc file, " + "(2) mass value, e.g., 132.0772 for debugging, "
                        + "(3) how many timepoints to process (-1 for all), "
                        + "(4) prefix for .data and rendered .pdf, '-' for stdout");
    }

    String netCDF = args[0];
    Double mz = Double.parseDouble(args[1]);
    Integer numSpectraToProcess = Integer.parseInt(args[2]);
    String outPrefix = args[3];
    String outPDF = outPrefix.equals("-") ? null : outPrefix + ".pdf";
    String outDATA = outPrefix.equals("-") ? null : outPrefix + ".data";

    ExtractFromNetCDFAroundMass e = new ExtractFromNetCDFAroundMass();
    List<Triple<Double, Double, Double>> window = e.get2DWindow(netCDF, mz, numSpectraToProcess);

    // Write data output to outfile
    PrintStream whereTo = outDATA == null ? System.out : new PrintStream(new FileOutputStream(outDATA));
    for (Triple<Double, Double, Double> xyz : window) {
        whereTo.format("%.4f\t%.4f\t%.4f\n", xyz.getLeft(), xyz.getMiddle(), xyz.getRight());
        whereTo.flush();
    }

    if (outDATA != null) {
        // if outDATA != null, we have written to a .data file
        // now render the .data to the corresponding .pdf file

        // first close the .data
        whereTo.close();

        // render outDATA to outPDF using gnuplot
        Gnuplotter plotter = new Gnuplotter();
        plotter.plot3D(outDATA, outPDF, netCDF, mz);
    }
}

From source file:it.acubelab.smaph.learn.GenerateModel.java

public static void main(String[] args) throws Exception {
    Locale.setDefault(Locale.US);
    String freebKey = "";

    SmaphConfig.setConfigFile("smaph-config.xml");
    String bingKey = SmaphConfig.getDefaultBingKey();

    WikipediaApiInterface wikiApi = new WikipediaApiInterface("wid.cache", "redirect.cache");
    FreebaseApi freebApi = new FreebaseApi(freebKey, "freeb.cache");
    double[][] paramsToTest = new double[][] {
            /*
             * {0.035, 0.5 }, {0.035, 1 }, {0.035, 4 }, {0.035, 8 }, {0.035, 10 },
             * {0.035, 16 }, {0.714, .5 }, {0.714, 1 }, {0.714, 4 }, {0.714, 8 },
             * {0.714, 10 }, {0.714, 16 }, {0.9, .5 }, {0.9, 1 }, {0.9, 4 }, {0.9, 8
             * }, {0.9, 10 }, {0.9, 16 },
             * 
             * { 1.0/15.0, 1 }, { 1.0/27.0, 1 },
             */

            /*
             * {0.01, 1}, {0.01, 5}, {0.01, 10}, {0.03, 1}, {0.03, 5}, {0.03, 10},
             * {0.044, 1}, {0.044, 5}, {0.044, 10}, {0.06, 1}, {0.06, 5}, {0.06,
             * 10},
             */
            { 0.03, 5 }, };
    double[][] weightsToTest = new double[][] {

            /*
             * { 3, 4 }
             */
            { 3.8, 3 }, { 3.8, 4 }, { 3.8, 5 }, { 3.8, 6 }, { 3.8, 7 }, { 3.8, 8 }, { 3.8, 9 }, { 3.8, 10 },

    };
    Integer[][] featuresSetsToTest = new Integer[][] {
            //{ 1, 2, 3, 6, 7, 8,   9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25 },
            { 1, 2, 3, 6, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25 },
            /*
             * { 1, 2, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18},
             */

    }; // < -------------------------------------- MIND THIS
    int wikiSearckTopK = 10; // <---------------------------
    String filePrefix = "_ANW";// <---------------------------

    WikipediaToFreebase wikiToFreebase = new WikipediaToFreebase("mapdb");
    List<ModelConfigurationResult> mcrs = new Vector<>();
    for (double editDistanceThr = 0.7; editDistanceThr <= 0.7; editDistanceThr += 0.7) {
        SmaphAnnotator bingAnnotator = GenerateTrainingAndTest.getDefaultBingAnnotator(wikiApi, wikiToFreebase,
                editDistanceThr, wikiSearckTopK, bingKey);
        WATAnnotator.setCache("wikisense.cache");
        SmaphAnnotator.setCache(SmaphConfig.getDefaultBingCache());

        BinaryExampleGatherer trainEntityFilterGatherer = new BinaryExampleGatherer();
        BinaryExampleGatherer testEntityFilterGatherer = new BinaryExampleGatherer();
        GenerateTrainingAndTest.gatherExamplesTrainingAndDevel(bingAnnotator, trainEntityFilterGatherer,
                testEntityFilterGatherer, wikiApi, wikiToFreebase, freebApi);

        SmaphAnnotator.unSetCache();
        BinaryExampleGatherer trainGatherer = trainEntityFilterGatherer; // //////////////
        // <----------------------
        BinaryExampleGatherer testGatherer = testEntityFilterGatherer; // //////////////
        // <----------------------

        int count = 0;
        for (Integer[] ftrToTestArray : featuresSetsToTest) {
            // double gamma = 1.0 / ftrToTestArray.length; //
            // <--------------------- MIND THIS
            // double C = 1;// < -------------------------------------- MIND
            // THIS
            for (double[] paramsToTestArray : paramsToTest) {
                double gamma = paramsToTestArray[0];
                double C = paramsToTestArray[1];
                for (double[] weightsPosNeg : weightsToTest) {
                    double wPos = weightsPosNeg[0], wNeg = weightsPosNeg[1];
                    Vector<Integer> features = new Vector<>(Arrays.asList(ftrToTestArray));
                    Triple<svm_problem, double[], double[]> ftrsMinsMaxs = TuneModel
                            .getScaledTrainProblem(features, trainGatherer);
                    svm_problem trainProblem = ftrsMinsMaxs.getLeft();

                    String fileBase = getModelFileNameBaseEF(features.toArray(new Integer[0]), wPos, wNeg,
                            editDistanceThr, gamma, C) + filePrefix;
                    /*
                     * String fileBase = getModelFileNameBaseEQF(
                     * features.toArray(new Integer[0]), wPos, wNeg);
                     */// < -------------------------
                    LibSvmUtils.dumpRanges(ftrsMinsMaxs.getMiddle(), ftrsMinsMaxs.getRight(),
                            fileBase + ".range");
                    svm_model model = TuneModel.trainModel(wPos, wNeg, features, trainProblem, gamma, C);
                    svm.svm_save_model(fileBase + ".model", model);

                    MetricsResultSet metrics = TuneModel.ParameterTester.computeMetrics(model,
                            TuneModel.getScaledTestProblems(features, testGatherer, ftrsMinsMaxs.getMiddle(),
                                    ftrsMinsMaxs.getRight()));

                    int tp = metrics.getGlobalTp();
                    int fp = metrics.getGlobalFp();
                    int fn = metrics.getGlobalFn();
                    float microF1 = metrics.getMicroF1();
                    float macroF1 = metrics.getMacroF1();
                    float macroRec = metrics.getMacroRecall();
                    float macroPrec = metrics.getMacroPrecision();
                    int totVects = testGatherer.getExamplesCount();
                    mcrs.add(new ModelConfigurationResult(features, wPos, wNeg, editDistanceThr, tp, fp, fn,
                            totVects - tp - fp - fn, microF1, macroF1, macroRec, macroPrec));

                    System.err.printf("Trained %d/%d models.%n", ++count,
                            weightsToTest.length * featuresSetsToTest.length * paramsToTest.length);
                }
            }
        }
    }
    for (ModelConfigurationResult mcr : mcrs)
        System.out.printf("%.5f%%\t%.5f%%\t%.5f%%%n", mcr.getMacroPrecision() * 100, mcr.getMacroRecall() * 100,
                mcr.getMacroF1() * 100);
    for (double[] weightPosNeg : weightsToTest)
        System.out.printf("%.5f\t%.5f%n", weightPosNeg[0], weightPosNeg[1]);
    for (ModelConfigurationResult mcr : mcrs)
        System.out.println(mcr.getReadable());
    for (double[] paramGammaC : paramsToTest)
        System.out.printf("%.5f\t%.5f%n", paramGammaC[0], paramGammaC[1]);
    WATAnnotator.flush();
}

From source file:edu.umd.umiacs.clip.tools.math.Formatter.java

public static Triple<Double, Double, Double> format(Triple<Double, Double, Double> triple) {
    return Triple.of(format(triple.getLeft()), format(triple.getMiddle()), format(triple.getRight()));
}

From source file:ivorius.ivtoolkit.maze.components.MazeComponentConnector.java

public static <M extends WeightedMazeComponent<C>, C> List<ShiftedMazeComponent<M, C>> randomlyConnect(
        MorphingMazeComponent<C> morphingComponent, List<M> components,
        ConnectionStrategy<C> connectionStrategy, final MazeComponentPlacementStrategy<M, C> placementStrategy,
        Random random) {
    List<ShiftedMazeComponent<M, C>> result = new ArrayList<>();
    Deque<Triple<MazeRoom, MazeRoomConnection, C>> exitStack = new ArrayDeque<>();

    Predicate<ShiftedMazeComponent<M, C>> componentPredicate = Predicates.and(
            MazeComponents.<M, C>compatibilityPredicate(morphingComponent, connectionStrategy),
            MazeComponentPlacementStrategies.placeable(placementStrategy));
    WeightedSelector.WeightFunction<ShiftedMazeComponent<M, C>> weightFunction = getWeightFunction();

    addAllExits(placementStrategy, exitStack, morphingComponent.exits().entrySet());

    while (exitStack.size() > 0) {
        Triple<MazeRoom, MazeRoomConnection, C> triple = exitStack.removeLast();
        MazeRoom room = triple.getLeft();

        if (morphingComponent.rooms().contains(room))
            continue; // Has been filled while queued

        MazeRoomConnection exit = triple.getMiddle();
        C connection = triple.getRight();

        List<ShiftedMazeComponent<M, C>> placeable = FluentIterable.from(components)
                .transformAndConcat(MazeComponents.<M, C>shiftAllFunction(exit, connection, connectionStrategy))
                .filter(componentPredicate).toList();

        if (placeable.size() == 0) {
            IvToolkitCoreContainer.logger.warn("Did not find fitting component for maze!");
            IvToolkitCoreContainer.logger.warn("Suggested: X with exits "
                    + FluentIterable.from(morphingComponent.exits().entrySet()).filter(entryConnectsTo(room)));
            continue;
        }

        ShiftedMazeComponent<M, C> selected = WeightedSelector.canSelect(placeable, weightFunction)
                ? WeightedSelector.select(random, placeable, weightFunction)
                : placeable.get(random.nextInt(placeable.size())); // All weight 0 = select at random

        addAllExits(placementStrategy, exitStack, selected.exits().entrySet());

        morphingComponent.add(selected);
        result.add(selected);
    }

    return result;
}

From source file:com.linkedin.pinot.common.response.ServerInstance.java

public static ServerInstance forHostPort(String name, int port) {
    if (nameToInstanceInfo.containsKey(name)) {
        Triple<String, String, InetAddress> instanceInfo = nameToInstanceInfo.get(name);
        return new ServerInstance(instanceInfo.getLeft(), instanceInfo.getMiddle(), instanceInfo.getRight(),
                port, 0);
    } else {
        ServerInstance newInstance = new ServerInstance(name, port);

        nameToInstanceInfo.putIfAbsent(name, Triple.of(newInstance.getHostname(),
                newInstance.getShortHostName(), newInstance.getIpAddress()));

        return newInstance;
    }
}

From source file:gobblin.ingestion.google.webmaster.UrlTriePrefixGrouper.java

/**
 * Get the detailed pages under this group
 */
public static ArrayList<String> groupToPages(
        Triple<String, GoogleWebmasterFilter.FilterOperator, UrlTrieNode> group) {
    ArrayList<String> ret = new ArrayList<>();
    if (group.getMiddle().equals(GoogleWebmasterFilter.FilterOperator.EQUALS)) {
        if (group.getRight().isExist()) {
            ret.add(group.getLeft());
        }
    } else if (group.getMiddle().equals(GoogleWebmasterFilter.FilterOperator.CONTAINS)) {
        UrlTrie trie = new UrlTrie(group.getLeft(), group.getRight());
        Iterator<Pair<String, UrlTrieNode>> iterator = new UrlTriePostOrderIterator(trie, 1);
        while (iterator.hasNext()) {
            Pair<String, UrlTrieNode> next = iterator.next();
            if (next.getRight().isExist()) {
                ret.add(next.getLeft());
            }
        }
    }
    return ret;
}

From source file:com.etsy.arbiter.workflow.WorkflowGraphBuilder.java

/**
 * Build a workflow graph from the workflow definition, inserting fork/join pairs as appropriate for parallel
 *
 * @param workflow Arbiter Workflow object
 * @param config Arbiter Config object
 * @param outputDir Output directory for Graphviz graphs
 * @param generateGraphviz Indicate if Graphviz graphs should be generated for workflows
 * @param graphvizFormat The format in which Graphviz graphs should be generated if enabled
 * @return DirectedAcyclicGraph DAG of the workflow
 * @throws WorkflowGraphException
 */
public static DirectedAcyclicGraph<Action, DefaultEdge> buildWorkflowGraph(Workflow workflow, Config config,
        String outputDir, boolean generateGraphviz, String graphvizFormat) throws WorkflowGraphException {
    forkCount = 0;
    Map<String, Action> actionsByName = new HashMap<>();
    List<Action> workflowActions = workflow.getActions();

    // Add all the actions to a map of string -> action
    for (Action a : workflowActions) {
        actionsByName.put(a.getName(), a);
    }

    // DAG of the workflow in its raw un-optimized state.
    DirectedAcyclicGraph<Action, DefaultEdge> inputGraph = new DirectedAcyclicGraph<>(DefaultEdge.class);

    // Add all the actions as vertices. At this point there are no connections within the graph, just vertices.
    for (Action a : workflowActions) {
        inputGraph.addVertex(a);
    }

    // We need to traverse a second time so all the vertices are present when adding edges
    for (Action a : workflowActions) {
        if (a.getDependencies() != null) {
            for (String d : a.getDependencies()) {
                Action source = actionsByName.get(d);
                if (source == null) {
                    throw new WorkflowGraphException("Missing action for dependency " + d);
                }

                // Add the edge between the dep and the action
                try {
                    inputGraph.addDagEdge(source, a);
                } catch (DirectedAcyclicGraph.CycleFoundException e) {
                    throw new WorkflowGraphException("Cycle found while building original graph", e);
                }
            }
        }
    }

    if (generateGraphviz) {
        GraphvizGenerator.generateGraphviz(inputGraph, outputDir + "/" + workflow.getName() + "-input.dot",
                graphvizFormat);
    }

    // Final DAG we will be returning
    DirectedAcyclicGraph<Action, DefaultEdge> workflowGraph;
    Action startTransitionNode;
    Action endTransitionNode;

    try {
        // Process the graph into its properly connected and organized structure.
        Triple<DirectedAcyclicGraph<Action, DefaultEdge>, Action, Action> workflowGraphTriple = processSubcomponents(
                inputGraph);
        workflowGraph = workflowGraphTriple.getLeft();
        startTransitionNode = workflowGraphTriple.getMiddle();
        endTransitionNode = workflowGraphTriple.getRight();

        // These are the standard control flow nodes that must be present in every workflow
        Action start = new Action();
        start.setName("start");
        start.setType("start");
        workflowGraph.addVertex(start);
        workflowGraph.addDagEdge(start, startTransitionNode);

        Action end = new Action();
        end.setName("end");
        end.setType("end");
        workflowGraph.addVertex(end);

        if (workflow.getErrorHandler() != null) {
            workflowGraph.addVertex(workflow.getErrorHandler());
            workflowGraph.addDagEdge(workflow.getErrorHandler(), end);
            workflowGraph.addDagEdge(endTransitionNode, workflow.getErrorHandler());
        } else {
            workflowGraph.addDagEdge(endTransitionNode, end);
        }

        // The kill node will be used as the error transition when generating the XML as appropriate
        // There is no need to add any edges to it now
        if (config.getKillMessage() != null && config.getKillName() != null) {
            Action kill = new Action();
            kill.setType("kill");
            kill.setName(config.getKillName());
            kill.setProperty("message", NamedArgumentInterpolator.interpolate(config.getKillMessage(),
                    ImmutableMap.of("name", workflow.getName()), null));
            workflowGraph.addVertex(kill);
        }
    } catch (DirectedAcyclicGraph.CycleFoundException e) {
        throw new WorkflowGraphException("Cycle found while generating workflow", e);
    }

    return workflowGraph;
}

From source file:com.nttec.everychan.ui.tabs.TabsTrackerService.java

private static int findTab(List<Triple<String, String, String>> list, String tabUrl, String tabTitle) {
    for (int i = 0; i < list.size(); ++i) {
        Triple<String, String, String> triple = list.get(i);
        if (tabUrl == null) {
            if (triple.getLeft() == null && tabTitle.equals(triple.getRight())) {
                return i;
            }
        } else {
            if (tabUrl.equals(triple.getLeft())) {
                return i;
            }
        }
    }
    return -1;
}

From source file:com.etsy.arbiter.workflow.WorkflowGraphBuilder.java

/**
 * Recursively insert fork/joins for connected subcomponents of a graph
 *
 * @param vertices The set of vertices to process
 * @param parentGraph The parentGraph graph of these vertices
 * @return DirectedAcyclicGraph A new graph containing all the given vertices with appropriate fork/join pairs inserted
 * @throws WorkflowGraphException
 * @throws DirectedAcyclicGraph.CycleFoundException
 */
private static DirectedAcyclicGraph<Action, DefaultEdge> buildComponentGraph(Set<Action> vertices,
        DirectedAcyclicGraph<Action, DefaultEdge> parentGraph)
        throws WorkflowGraphException, DirectedAcyclicGraph.CycleFoundException {
    DirectedAcyclicGraph<Action, DefaultEdge> subgraph = buildSubgraph(parentGraph, vertices);

    // Start by pulling out the vertices with no incoming edges
    // These can run in parallel in a fork-join
    Set<Action> initialNodes = new HashSet<>();
    for (Action vertex : subgraph.vertexSet()) {
        if (subgraph.inDegreeOf(vertex) == 0) {
            initialNodes.add(vertex);
        }
    }

    DirectedAcyclicGraph<Action, DefaultEdge> result = new DirectedAcyclicGraph<>(DefaultEdge.class);

    if (initialNodes.isEmpty()) {
        // This is a very odd case, but just in case we'll fail if it happens
        throw new WorkflowGraphException("No nodes with inDegree = 0 found.  This shouldn't happen.");
    } else if (initialNodes.size() == 1) {
        // If there is only one node, we can't put it in a fork/join
        // In this case we'll add just that vertex to the resulting graph
        Action vertex = initialNodes.iterator().next();
        result.addVertex(vertex);
        // Remove the processed vertex so that we have new unprocessed subcomponents
        subgraph.removeVertex(vertex);
    } else {
        // If there are multiple nodes, insert a fork/join pair to run them in parallel
        Pair<Action, Action> forkJoin = addForkJoin(result);
        Action fork = forkJoin.getLeft();
        Action join = forkJoin.getRight();
        for (Action vertex : initialNodes) {
            result.addVertex(vertex);
            result.addDagEdge(fork, vertex);
            result.addDagEdge(vertex, join);
            // Remove the processed vertex so that we have new unprocessed subcomponents
            subgraph.removeVertex(vertex);
        }
    }

    // Now recursively process the graph with the processed nodes removed
    Triple<DirectedAcyclicGraph<Action, DefaultEdge>, Action, Action> subComponentGraphTriple = processSubcomponents(
            subgraph);
    DirectedAcyclicGraph<Action, DefaultEdge> subComponentGraph = subComponentGraphTriple.getLeft();

    // Having processed the subcomponents, we attach the "last" node of the graph created here to
    // the "first" node of the subcomponent graph
    Action noIncoming = subComponentGraphTriple.getMiddle();
    Action noOutgoing = null;

    for (Action vertex : result.vertexSet()) {
        if (noOutgoing == null && result.outDegreeOf(vertex) == 0) {
            noOutgoing = vertex;
        }
    }

    Graphs.addGraph(result, subComponentGraph);
    if (noOutgoing != null && noIncoming != null && !noOutgoing.equals(noIncoming)) {
        result.addDagEdge(noOutgoing, noIncoming);
    }
    return result;
}

From source file:blusunrize.immersiveengineering.api.energy.wires.ImmersiveNetHandler.java

private static void handleMapForDamage(Set<Triple<Connection, Vec3d, Vec3d>> in, EntityLivingBase e,
        BlockPos here) {
    final double KNOCKBACK_PER_DAMAGE = 10;
    if (!in.isEmpty()) {
        AxisAlignedBB eAabb = e.getEntityBoundingBox();
        for (Triple<Connection, Vec3d, Vec3d> conn : in)
            if (conn.getLeft().cableType.canCauseDamage()) {
                double extra = conn.getLeft().cableType.getDamageRadius();
                AxisAlignedBB includingExtra = eAabb.grow(extra).offset(-here.getX(), -here.getY(),
                        -here.getZ());
                boolean endpointsInEntity = includingExtra.contains(conn.getMiddle())
                        || includingExtra.contains(conn.getRight());
                RayTraceResult rayRes = endpointsInEntity ? null
                        : includingExtra.calculateIntercept(conn.getMiddle(), conn.getRight());
                if (endpointsInEntity || (rayRes != null && rayRes.typeOfHit == RayTraceResult.Type.BLOCK)) {
                    IImmersiveConnectable iic = toIIC(conn.getLeft().start, e.world);
                    float damage = 0;
                    if (iic != null)
                        damage = iic.getDamageAmount(e, conn.getLeft());
                    if (damage == 0) {
                        iic = toIIC(conn.getLeft().end, e.world);
                        if (iic != null)
                            damage = iic.getDamageAmount(e, conn.getLeft());
                    }
                    if (damage != 0) {
                        IEDamageSources.ElectricDamageSource dmg = IEDamageSources.causeWireDamage(damage,
                                conn.getLeft().cableType.getElectricSource());
                        if (dmg.apply(e)) {
                            damage = dmg.dmg;
                            Vec3d v = e.getLookVec();
                            knockbackNoSource(e, damage / KNOCKBACK_PER_DAMAGE, v.x, v.z);
                            iic.processDamage(e, damage, conn.getLeft());
                        }
                    }
                }
            }
    }
}