List of usage examples for org.apache.commons.lang3.tuple Triple getMiddle
public abstract M getMiddle();
Gets the middle element from this triple.
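Before the full-length examples below, a minimal self-contained sketch of the call; the class name and sample values are illustrative, not taken from any of the projects listed:

    import org.apache.commons.lang3.tuple.Triple;

    public class GetMiddleSketch {
        public static void main(String[] args) {
            // Triple.of(...) builds an immutable triple; getMiddle() returns its second component.
            Triple<String, Integer, Double> t = Triple.of("left", 42, 3.14);
            System.out.println(t.getMiddle()); // prints 42
        }
    }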
From source file:com.act.lcms.ExtractFromNetCDFAroundMass.java
public static void main(String[] args) throws Exception {
    if (args.length != 4 || !args[0].endsWith(".nc")) {
        throw new RuntimeException("Needs (1) NetCDF .nc file, "
                + "(2) mass value, e.g., 132.0772 for debugging, "
                + "(3) how many timepoints to process (-1 for all), "
                + "(4) prefix for .data and rendered .pdf, '-' for stdout");
    }
    String netCDF = args[0];
    Double mz = Double.parseDouble(args[1]);
    Integer numSpectraToProcess = Integer.parseInt(args[2]);
    String outPrefix = args[3];
    String outPDF = outPrefix.equals("-") ? null : outPrefix + ".pdf";
    String outDATA = outPrefix.equals("-") ? null : outPrefix + ".data";

    ExtractFromNetCDFAroundMass e = new ExtractFromNetCDFAroundMass();
    List<Triple<Double, Double, Double>> window = e.get2DWindow(netCDF, mz, numSpectraToProcess);

    // Write data output to outfile
    PrintStream whereTo = outDATA == null ? System.out : new PrintStream(new FileOutputStream(outDATA));
    for (Triple<Double, Double, Double> xyz : window) {
        whereTo.format("%.4f\t%.4f\t%.4f\n", xyz.getLeft(), xyz.getMiddle(), xyz.getRight());
        whereTo.flush();
    }

    if (outDATA != null) {
        // If outDATA is != null, then we have written to the .data file;
        // now render the .data to the corresponding .pdf file.
        // First close the .data stream.
        whereTo.close();
        // Render outDATA to outPDF using gnuplot
        Gnuplotter plotter = new Gnuplotter();
        plotter.plot3D(outDATA, outPDF, netCDF, mz);
    }
}
From source file:it.acubelab.smaph.learn.GenerateModel.java
public static void main(String[] args) throws Exception {
    Locale.setDefault(Locale.US);
    String freebKey = "";
    SmaphConfig.setConfigFile("smaph-config.xml");
    String bingKey = SmaphConfig.getDefaultBingKey();
    WikipediaApiInterface wikiApi = new WikipediaApiInterface("wid.cache", "redirect.cache");
    FreebaseApi freebApi = new FreebaseApi(freebKey, "freeb.cache");

    double[][] paramsToTest = new double[][] {
            /*
             * {0.035, 0.5 }, {0.035, 1 }, {0.035, 4 }, {0.035, 8 }, {0.035, 10 },
             * {0.035, 16 }, {0.714, .5 }, {0.714, 1 }, {0.714, 4 }, {0.714, 8 },
             * {0.714, 10 }, {0.714, 16 }, {0.9, .5 }, {0.9, 1 }, {0.9, 4 }, {0.9, 8 },
             * {0.9, 10 }, {0.9, 16 },
             *
             * { 1.0/15.0, 1 }, { 1.0/27.0, 1 },
             */
            /*
             * {0.01, 1}, {0.01, 5}, {0.01, 10}, {0.03, 1}, {0.03, 5}, {0.03, 10},
             * {0.044, 1}, {0.044, 5}, {0.044, 10}, {0.06, 1}, {0.06, 5}, {0.06, 10},
             */
            { 0.03, 5 },
    };
    double[][] weightsToTest = new double[][] {
            /* { 3, 4 } */
            { 3.8, 3 }, { 3.8, 4 }, { 3.8, 5 }, { 3.8, 6 }, { 3.8, 7 }, { 3.8, 8 }, { 3.8, 9 }, { 3.8, 10 },
    };
    Integer[][] featuresSetsToTest = new Integer[][] {
            //{ 1, 2, 3, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25 },
            { 1, 2, 3, 6, 7, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25 },
            /* { 1, 2, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18}, */
    }; // <-------------------------------------- MIND THIS
    int wikiSearckTopK = 10; // <---------------------------
    String filePrefix = "_ANW"; // <---------------------------
    WikipediaToFreebase wikiToFreebase = new WikipediaToFreebase("mapdb");

    List<ModelConfigurationResult> mcrs = new Vector<>();
    for (double editDistanceThr = 0.7; editDistanceThr <= 0.7; editDistanceThr += 0.7) {
        SmaphAnnotator bingAnnotator = GenerateTrainingAndTest.getDefaultBingAnnotator(wikiApi, wikiToFreebase,
                editDistanceThr, wikiSearckTopK, bingKey);
        WATAnnotator.setCache("wikisense.cache");
        SmaphAnnotator.setCache(SmaphConfig.getDefaultBingCache());

        BinaryExampleGatherer trainEntityFilterGatherer = new BinaryExampleGatherer();
        BinaryExampleGatherer testEntityFilterGatherer = new BinaryExampleGatherer();
        GenerateTrainingAndTest.gatherExamplesTrainingAndDevel(bingAnnotator, trainEntityFilterGatherer,
                testEntityFilterGatherer, wikiApi, wikiToFreebase, freebApi);
        SmaphAnnotator.unSetCache();

        BinaryExampleGatherer trainGatherer = trainEntityFilterGatherer; // <----------------------
        BinaryExampleGatherer testGatherer = testEntityFilterGatherer; // <----------------------

        int count = 0;
        for (Integer[] ftrToTestArray : featuresSetsToTest) {
            // double gamma = 1.0 / ftrToTestArray.length; // <--------------------- MIND THIS
            // double C = 1; // <-------------------------------------- MIND THIS
            for (double[] paramsToTestArray : paramsToTest) {
                double gamma = paramsToTestArray[0];
                double C = paramsToTestArray[1];
                for (double[] weightsPosNeg : weightsToTest) {
                    double wPos = weightsPosNeg[0], wNeg = weightsPosNeg[1];
                    Vector<Integer> features = new Vector<>(Arrays.asList(ftrToTestArray));
                    Triple<svm_problem, double[], double[]> ftrsMinsMaxs = TuneModel
                            .getScaledTrainProblem(features, trainGatherer);
                    svm_problem trainProblem = ftrsMinsMaxs.getLeft();

                    String fileBase = getModelFileNameBaseEF(features.toArray(new Integer[0]), wPos, wNeg,
                            editDistanceThr, gamma, C) + filePrefix;
                    /*
                     * String fileBase = getModelFileNameBaseEQF(
                     * features.toArray(new Integer[0]), wPos, wNeg);
                     */ // <-------------------------
                    LibSvmUtils.dumpRanges(ftrsMinsMaxs.getMiddle(), ftrsMinsMaxs.getRight(),
                            fileBase + ".range");
                    svm_model model = TuneModel.trainModel(wPos, wNeg, features, trainProblem, gamma, C);
                    svm.svm_save_model(fileBase + ".model", model);

                    MetricsResultSet metrics = TuneModel.ParameterTester.computeMetrics(model,
                            TuneModel.getScaledTestProblems(features, testGatherer, ftrsMinsMaxs.getMiddle(),
                                    ftrsMinsMaxs.getRight()));

                    int tp = metrics.getGlobalTp();
                    int fp = metrics.getGlobalFp();
                    int fn = metrics.getGlobalFn();
                    float microF1 = metrics.getMicroF1();
                    float macroF1 = metrics.getMacroF1();
                    float macroRec = metrics.getMacroRecall();
                    float macroPrec = metrics.getMacroPrecision();
                    int totVects = testGatherer.getExamplesCount();
                    mcrs.add(new ModelConfigurationResult(features, wPos, wNeg, editDistanceThr, tp, fp, fn,
                            totVects - tp - fp - fn, microF1, macroF1, macroRec, macroPrec));

                    System.err.printf("Trained %d/%d models.%n", ++count,
                            weightsToTest.length * featuresSetsToTest.length * paramsToTest.length);
                }
            }
        }
    }
    for (ModelConfigurationResult mcr : mcrs)
        System.out.printf("%.5f%%\t%.5f%%\t%.5f%%%n", mcr.getMacroPrecision() * 100,
                mcr.getMacroRecall() * 100, mcr.getMacroF1() * 100);
    for (double[] weightPosNeg : weightsToTest)
        System.out.printf("%.5f\t%.5f%n", weightPosNeg[0], weightPosNeg[1]);
    for (ModelConfigurationResult mcr : mcrs)
        System.out.println(mcr.getReadable());
    for (double[] paramGammaC : paramsToTest)
        System.out.printf("%.5f\t%.5f%n", paramGammaC[0], paramGammaC[1]);
    WATAnnotator.flush();
}
From source file:edu.umd.umiacs.clip.tools.math.Formatter.java
public static Triple<Double, Double, Double> format(Triple<Double, Double, Double> triple) {
    return Triple.of(format(triple.getLeft()), format(triple.getMiddle()), format(triple.getRight()));
}
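A hypothetical call site for this helper; the sample values are illustrative, and the scalar format(...) overload it maps over each slot is the one the method body itself invokes:

    // Hypothetical usage: the scalar format(...) is applied to each of the three components.
    Triple<Double, Double, Double> rounded = Formatter.format(Triple.of(0.123456, 0.987654, 0.5));
    Double middle = rounded.getMiddle(); // formatted middle component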
From source file:gobblin.ingestion.google.webmaster.UrlTriePrefixGrouper.java
/**
 * Get the detailed pages under this group.
 */
public static ArrayList<String> groupToPages(
        Triple<String, GoogleWebmasterFilter.FilterOperator, UrlTrieNode> group) {
    ArrayList<String> ret = new ArrayList<>();
    if (group.getMiddle().equals(GoogleWebmasterFilter.FilterOperator.EQUALS)) {
        if (group.getRight().isExist()) {
            ret.add(group.getLeft());
        }
    } else if (group.getMiddle().equals(GoogleWebmasterFilter.FilterOperator.CONTAINS)) {
        UrlTrie trie = new UrlTrie(group.getLeft(), group.getRight());
        Iterator<Pair<String, UrlTrieNode>> iterator = new UrlTriePostOrderIterator(trie, 1);
        while (iterator.hasNext()) {
            Pair<String, UrlTrieNode> next = iterator.next();
            if (next.getRight().isExist()) {
                ret.add(next.getLeft());
            }
        }
    }
    return ret;
}
From source file:com.linkedin.pinot.common.response.ServerInstance.java
public static ServerInstance forHostPort(String name, int port) {
    if (nameToInstanceInfo.containsKey(name)) {
        Triple<String, String, InetAddress> instanceInfo = nameToInstanceInfo.get(name);
        return new ServerInstance(instanceInfo.getLeft(), instanceInfo.getMiddle(), instanceInfo.getRight(),
                port, 0);
    } else {
        ServerInstance newInstance = new ServerInstance(name, port);
        nameToInstanceInfo.putIfAbsent(name, Triple.of(newInstance.getHostname(),
                newInstance.getShortHostName(), newInstance.getIpAddress()));
        return newInstance;
    }
}
From source file:ivorius.ivtoolkit.maze.components.MazeComponentConnector.java
public static <M extends WeightedMazeComponent<C>, C> List<ShiftedMazeComponent<M, C>> randomlyConnect(
        MorphingMazeComponent<C> morphingComponent, List<M> components,
        ConnectionStrategy<C> connectionStrategy,
        final MazeComponentPlacementStrategy<M, C> placementStrategy, Random random) {
    List<ShiftedMazeComponent<M, C>> result = new ArrayList<>();
    Deque<Triple<MazeRoom, MazeRoomConnection, C>> exitStack = new ArrayDeque<>();

    Predicate<ShiftedMazeComponent<M, C>> componentPredicate = Predicates.and(
            MazeComponents.<M, C>compatibilityPredicate(morphingComponent, connectionStrategy),
            MazeComponentPlacementStrategies.placeable(placementStrategy));
    WeightedSelector.WeightFunction<ShiftedMazeComponent<M, C>> weightFunction = getWeightFunction();

    addAllExits(placementStrategy, exitStack, morphingComponent.exits().entrySet());

    while (exitStack.size() > 0) {
        Triple<MazeRoom, MazeRoomConnection, C> triple = exitStack.removeLast();
        MazeRoom room = triple.getLeft();

        if (morphingComponent.rooms().contains(room))
            continue; // Has been filled while queued

        MazeRoomConnection exit = triple.getMiddle();
        C connection = triple.getRight();

        List<ShiftedMazeComponent<M, C>> placeable = FluentIterable.from(components)
                .transformAndConcat(MazeComponents.<M, C>shiftAllFunction(exit, connection, connectionStrategy))
                .filter(componentPredicate).toList();

        if (placeable.size() == 0) {
            IvToolkitCoreContainer.logger.warn("Did not find fitting component for maze!");
            IvToolkitCoreContainer.logger.warn("Suggested: X with exits "
                    + FluentIterable.from(morphingComponent.exits().entrySet()).filter(entryConnectsTo(room)));
            continue;
        }

        ShiftedMazeComponent<M, C> selected = WeightedSelector.canSelect(placeable, weightFunction)
                ? WeightedSelector.select(random, placeable, weightFunction)
                : placeable.get(random.nextInt(placeable.size())); // All weight 0 = select at random

        addAllExits(placementStrategy, exitStack, selected.exits().entrySet());

        morphingComponent.add(selected);
        result.add(selected);
    }

    return result;
}
From source file:com.etsy.arbiter.workflow.WorkflowGraphBuilder.java
/**
 * Build a workflow graph from the workflow definition, inserting fork/join pairs as appropriate for parallel execution.
 *
 * @param workflow Arbiter Workflow object
 * @param config Arbiter Config object
 * @param outputDir Output directory for Graphviz graphs
 * @param generateGraphviz Indicate if Graphviz graphs should be generated for workflows
 * @param graphvizFormat The format in which Graphviz graphs should be generated if enabled
 * @return DirectedAcyclicGraph DAG of the workflow
 * @throws WorkflowGraphException
 */
public static DirectedAcyclicGraph<Action, DefaultEdge> buildWorkflowGraph(Workflow workflow, Config config,
        String outputDir, boolean generateGraphviz, String graphvizFormat) throws WorkflowGraphException {
    forkCount = 0;
    Map<String, Action> actionsByName = new HashMap<>();
    List<Action> workflowActions = workflow.getActions();

    // Add all the actions to a map of string -> action
    for (Action a : workflowActions) {
        actionsByName.put(a.getName(), a);
    }

    // DAG of the workflow in its raw un-optimized state
    DirectedAcyclicGraph<Action, DefaultEdge> inputGraph = new DirectedAcyclicGraph<>(DefaultEdge.class);

    // Add all the actions as vertices. At this point there are no connections within the graph, just vertices.
    for (Action a : workflowActions) {
        inputGraph.addVertex(a);
    }

    // We need to traverse a second time so all the vertices are present when adding edges
    for (Action a : workflowActions) {
        if (a.getDependencies() != null) {
            for (String d : a.getDependencies()) {
                Action source = actionsByName.get(d);
                if (source == null) {
                    throw new WorkflowGraphException("Missing action for dependency " + d);
                }
                // Add the edge between the dependency and the action
                try {
                    inputGraph.addDagEdge(source, a);
                } catch (DirectedAcyclicGraph.CycleFoundException e) {
                    throw new WorkflowGraphException("Cycle found while building original graph", e);
                }
            }
        }
    }

    if (generateGraphviz) {
        GraphvizGenerator.generateGraphviz(inputGraph, outputDir + "/" + workflow.getName() + "-input.dot",
                graphvizFormat);
    }

    // Final DAG we will be returning
    DirectedAcyclicGraph<Action, DefaultEdge> workflowGraph;
    Action startTransitionNode;
    Action endTransitionNode;

    try {
        // Process the graph into its properly connected and organized structure
        Triple<DirectedAcyclicGraph<Action, DefaultEdge>, Action, Action> workflowGraphTriple = processSubcomponents(
                inputGraph);
        workflowGraph = workflowGraphTriple.getLeft();
        startTransitionNode = workflowGraphTriple.getMiddle();
        endTransitionNode = workflowGraphTriple.getRight();

        // These are the standard control flow nodes that must be present in every workflow
        Action start = new Action();
        start.setName("start");
        start.setType("start");
        workflowGraph.addVertex(start);
        workflowGraph.addDagEdge(start, startTransitionNode);

        Action end = new Action();
        end.setName("end");
        end.setType("end");
        workflowGraph.addVertex(end);

        if (workflow.getErrorHandler() != null) {
            workflowGraph.addVertex(workflow.getErrorHandler());
            workflowGraph.addDagEdge(workflow.getErrorHandler(), end);
            workflowGraph.addDagEdge(endTransitionNode, workflow.getErrorHandler());
        } else {
            workflowGraph.addDagEdge(endTransitionNode, end);
        }

        // The kill node will be used as the error transition when generating the XML as appropriate.
        // There is no need to add any edges to it now.
        if (config.getKillMessage() != null && config.getKillName() != null) {
            Action kill = new Action();
            kill.setType("kill");
            kill.setName(config.getKillName());
            kill.setProperty("message", NamedArgumentInterpolator.interpolate(config.getKillMessage(),
                    ImmutableMap.of("name", workflow.getName()), null));
            workflowGraph.addVertex(kill);
        }
    } catch (DirectedAcyclicGraph.CycleFoundException e) {
        throw new WorkflowGraphException("Cycle found while generating workflow", e);
    }

    return workflowGraph;
}
From source file:de.tu_dortmund.ub.data.dswarm.TaskProcessingUnit.java
private static void executeTPUPartsOnDemand(final Optional<Boolean> optionalDoInit,
        final Optional<Boolean> optionalAllowMultipleDataModels, String[] watchFolderFiles,
        final String resourceWatchFolder, final Optional<String> optionalOutputDataModelID,
        final String serviceName, final Integer engineThreads,
        final Optional<Boolean> optionalDoTransformations, final Optional<Boolean> optionalDoIngestOnTheFly,
        final Optional<Boolean> optionalDoExportOnTheFly, final Optional<String> optionalExportMimeType,
        final Optional<String> optionalExportFileExtension, final Properties config) throws Exception {

    // keys = input data models; values = related data resources
    final Map<String, Triple<String, String, String>> inputDataModelsAndResources = new HashMap<>();

    // init
    if (optionalDoInit.isPresent() && optionalDoInit.get()) {
        if (optionalAllowMultipleDataModels.isPresent() && optionalAllowMultipleDataModels.get()) {
            for (int i = 0; i < watchFolderFiles.length; i++) {
                final String initResourceFileName = watchFolderFiles[i];
                doInit(resourceWatchFolder, initResourceFileName, serviceName, engineThreads, config,
                        inputDataModelsAndResources);
                // remove the file already processed during init from the files list to avoid duplicates
                watchFolderFiles = ArrayUtils.removeElement(watchFolderFiles, initResourceFileName);
            }
        } else {
            // use the first file in the folder for init
            final String initResourceFileName = watchFolderFiles[0];
            doInit(resourceWatchFolder, initResourceFileName, serviceName, engineThreads, config,
                    inputDataModelsAndResources);
            // remove the file already processed during init from the files list to avoid duplicates
            watchFolderFiles = ArrayUtils.removeElement(watchFolderFiles, initResourceFileName);
        }
    } else {
        final String inputDataModelID = config.getProperty(TPUStatics.PROTOTYPE_INPUT_DATA_MODEL_ID_IDENTIFIER);
        final String resourceID = config.getProperty(TPUStatics.PROTOTYPE_RESOURCE_ID_INDENTIFIER);
        inputDataModelsAndResources.put(inputDataModelID, Triple.of(inputDataModelID, resourceID, null));
        LOG.info("skip init part");
    }

    final Optional<Boolean> optionalDoIngest = TPUUtil.getBooleanConfigValue(TPUStatics.DO_INGEST_IDENTIFIER,
            config);

    // ingest
    if (optionalDoIngest.isPresent() && optionalDoIngest.get()) {
        final String projectName = config.getProperty(TPUStatics.PROJECT_NAME_IDENTIFIER);
        if (!optionalAllowMultipleDataModels.isPresent() || !optionalAllowMultipleDataModels.get()) {
            final Set<Map.Entry<String, Triple<String, String, String>>> entries = inputDataModelsAndResources
                    .entrySet();
            final Iterator<Map.Entry<String, Triple<String, String, String>>> iterator = entries.iterator();
            final Map.Entry<String, Triple<String, String, String>> entry = iterator.next();
            final String inputDataModelID = entry.getKey();
            final Triple<String, String, String> triple = entry.getValue();
            final String resourceID = triple.getMiddle();
            executeIngests(watchFolderFiles, inputDataModelID, resourceID, projectName, serviceName,
                    engineThreads, config);
        }
    } else {
        LOG.info("skip ingest");
    }

    if (!optionalOutputDataModelID.isPresent()) {
        throw new Exception("please set an output data model ('prototype.outputDataModelID') for this TPU task");
    }

    final String outputDataModelID = optionalOutputDataModelID.get();

    // task execution
    if (optionalDoTransformations.isPresent() && optionalDoTransformations.get()) {
        if (optionalAllowMultipleDataModels.isPresent() && optionalAllowMultipleDataModels.get()) {
            final Set<Map.Entry<String, Triple<String, String, String>>> entries = inputDataModelsAndResources
                    .entrySet();
            for (final Map.Entry<String, Triple<String, String, String>> entry : entries) {
                final String inputDataModelID = entry.getKey();
                executeTransform(inputDataModelID, outputDataModelID, optionalDoIngestOnTheFly,
                        optionalDoExportOnTheFly, optionalExportMimeType, optionalExportFileExtension,
                        engineThreads, serviceName, config);
            }
        } else {
            final Set<Map.Entry<String, Triple<String, String, String>>> entries = inputDataModelsAndResources
                    .entrySet();
            final Iterator<Map.Entry<String, Triple<String, String, String>>> iterator = entries.iterator();
            final Map.Entry<String, Triple<String, String, String>> entry = iterator.next();
            final String inputDataModelID = entry.getKey();
            executeTransform(inputDataModelID, outputDataModelID, optionalDoIngestOnTheFly,
                    optionalDoExportOnTheFly, optionalExportMimeType, optionalExportFileExtension,
                    engineThreads, serviceName, config);
        }
    } else {
        LOG.info("skip transformations");
    }

    final Optional<Boolean> optionalDoExport = TPUUtil.getBooleanConfigValue(TPUStatics.DO_EXPORT_IDENTIFIER,
            config);

    // export
    if (optionalDoExport.isPresent() && optionalDoExport.get()) {
        if (!optionalAllowMultipleDataModels.isPresent() || !optionalAllowMultipleDataModels.get()) {
            final String exportDataModelID;
            if (outputDataModelID != null && !outputDataModelID.trim().isEmpty()) {
                exportDataModelID = outputDataModelID;
            } else {
                final Set<Map.Entry<String, Triple<String, String, String>>> entries = inputDataModelsAndResources
                        .entrySet();
                final Iterator<Map.Entry<String, Triple<String, String, String>>> iterator = entries.iterator();
                final Map.Entry<String, Triple<String, String, String>> entry = iterator.next();
                exportDataModelID = entry.getKey();
            }
            executeExport(exportDataModelID, optionalExportMimeType, optionalExportFileExtension, engineThreads,
                    serviceName, config);
        }
    } else {
        LOG.info("skip export");
    }

    // clean-up
    int cnt = 0;
    final String engineDswarmAPI = config.getProperty(TPUStatics.ENGINE_DSWARM_API_IDENTIFIER);
    final Set<Map.Entry<String, Triple<String, String, String>>> entries = inputDataModelsAndResources
            .entrySet();
    for (final Map.Entry<String, Triple<String, String, String>> entry : entries) {
        final Triple<String, String, String> triple = entry.getValue();
        final String inputDataModelId = triple.getLeft();
        final String resourceId = triple.getMiddle();
        final String configurationId = triple.getRight();
        TPUUtil.deleteObject(inputDataModelId, DswarmBackendStatics.DATAMODELS_ENDPOINT, serviceName,
                engineDswarmAPI, cnt);
        TPUUtil.deleteObject(resourceId, DswarmBackendStatics.RESOURCES_ENDPOINT, serviceName, engineDswarmAPI,
                cnt);
        TPUUtil.deleteObject(configurationId, DswarmBackendStatics.CONFIGURATIONS_ENDPOINT, serviceName,
                engineDswarmAPI, cnt);
        cnt++;
    }
}
From source file:com.etsy.arbiter.workflow.WorkflowGraphBuilder.java
/**
 * Recursively insert fork/joins for connected subcomponents of a graph.
 *
 * @param vertices The set of vertices to process
 * @param parentGraph The parentGraph graph of these vertices
 * @return DirectedAcyclicGraph A new graph containing all the given vertices with appropriate fork/join pairs inserted
 * @throws WorkflowGraphException
 * @throws DirectedAcyclicGraph.CycleFoundException
 */
private static DirectedAcyclicGraph<Action, DefaultEdge> buildComponentGraph(Set<Action> vertices,
        DirectedAcyclicGraph<Action, DefaultEdge> parentGraph)
        throws WorkflowGraphException, DirectedAcyclicGraph.CycleFoundException {
    DirectedAcyclicGraph<Action, DefaultEdge> subgraph = buildSubgraph(parentGraph, vertices);

    // Start by pulling out the vertices with no incoming edges
    // These can run in parallel in a fork-join
    Set<Action> initialNodes = new HashSet<>();
    for (Action vertex : subgraph.vertexSet()) {
        if (subgraph.inDegreeOf(vertex) == 0) {
            initialNodes.add(vertex);
        }
    }

    DirectedAcyclicGraph<Action, DefaultEdge> result = new DirectedAcyclicGraph<>(DefaultEdge.class);

    if (initialNodes.isEmpty()) {
        // This is a very odd case, but just in case we'll fail if it happens
        throw new WorkflowGraphException("No nodes with inDegree = 0 found. This shouldn't happen.");
    } else if (initialNodes.size() == 1) {
        // If there is only one node, we can't put it in a fork/join
        // In this case we'll add just that vertex to the resulting graph
        Action vertex = initialNodes.iterator().next();
        result.addVertex(vertex);
        // Remove the processed vertex so that we have new unprocessed subcomponents
        subgraph.removeVertex(vertex);
    } else {
        // If there are multiple nodes, insert a fork/join pair to run them in parallel
        Pair<Action, Action> forkJoin = addForkJoin(result);
        Action fork = forkJoin.getLeft();
        Action join = forkJoin.getRight();
        for (Action vertex : initialNodes) {
            result.addVertex(vertex);
            result.addDagEdge(fork, vertex);
            result.addDagEdge(vertex, join);
            // Remove the processed vertex so that we have new unprocessed subcomponents
            subgraph.removeVertex(vertex);
        }
    }

    // Now recursively process the graph with the processed nodes removed
    Triple<DirectedAcyclicGraph<Action, DefaultEdge>, Action, Action> subComponentGraphTriple = processSubcomponents(
            subgraph);
    DirectedAcyclicGraph<Action, DefaultEdge> subComponentGraph = subComponentGraphTriple.getLeft();

    // Having processed the subcomponents, we attach the "last" node of the graph created here to
    // the "first" node of the subcomponent graph
    Action noIncoming = subComponentGraphTriple.getMiddle();
    Action noOutgoing = null;
    for (Action vertex : result.vertexSet()) {
        if (noOutgoing == null && result.outDegreeOf(vertex) == 0) {
            noOutgoing = vertex;
        }
    }

    Graphs.addGraph(result, subComponentGraph);
    if (noOutgoing != null && noIncoming != null && !noOutgoing.equals(noIncoming)) {
        result.addDagEdge(noOutgoing, noIncoming);
    }

    return result;
}
From source file:edu.nyu.tandon.tool.BinnedRawHits.java
private static void dumpPosthits(BinnedRawHits query, long numDocs, boolean endRun) {
    Collections.sort(query.postHits);

    ImmutableTriple<Integer, Integer, Integer> t;
    Triple<Integer, Integer, Integer> lastT = new MutableTriple<>(-1, -1, -1);
    int hits = 0;
    for (int i = 0; i < query.postHits.size(); i++) {
        t = query.postHits.get(i);
        if (t.compareTo(lastT) != 0) {
            if (hits > 0) {
                // format: doc, term, , rank, #hits
                query.outputPH.printf("%d,%d,%d,%d\n", lastT.getLeft(), lastT.getMiddle(), lastT.getRight(),
                        hits);
            }
            hits = 0;
            lastT = t;
        }
        hits++;
    }
    query.postHits.clear();

    if (query.outputDH != System.out) {
        query.outputPH.close();
    }
    if (!endRun) {
        try {
            query.outputPH = new PrintStream(new FastBufferedOutputStream(
                    new FileOutputStream(outputName + "-" + phBatch++ + ".ph.txt")));
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
    }
}