List of usage examples for org.apache.commons.lang3.tuple.Pair.of
public static <L, R> Pair<L, R> of(final L left, final R right)
Obtains an immutable pair from two objects, inferring the generic types.
This factory allows the pair to be created using inference to obtain the generic types.
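A minimal, standalone sketch of the factory itself; the class name, variable names, and values here are illustrative only and do not come from any of the projects listed below:

import org.apache.commons.lang3.tuple.Pair;

public class PairOfDemo {
    public static void main(String[] args) {
        // The generic types <String, Integer> are inferred from the arguments,
        // so no explicit type parameters are needed at the call site.
        Pair<String, Integer> entry = Pair.of("answer", 42);

        System.out.println(entry.getLeft());  // answer
        System.out.println(entry.getRight()); // 42
        System.out.println(entry);            // (answer,42)
    }
}

Pair.of returns an ImmutablePair, so getLeft() and getRight() always return exactly the two values passed to the factory.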
From source file: enumj.StreamComparator.java

private Pair<Function<Stream<T>, Stream<T>>, Function<E, E>> getSortedFuns() {
    final Function<Stream<T>, Stream<T>> lhs = s -> s.sorted();
    final Function<E, E> rhs = e -> sorted(e);
    if (statistics != null) {
        statistics.sorted();
    }
    return Pair.of(lhs, rhs);
}
From source file: com.uber.hoodie.common.util.TestCompactionUtils.java

/**
 * Generate input for compaction plan tests
 */
private Pair<List<Pair<String, FileSlice>>, HoodieCompactionPlan> buildCompactionPlan() {
    FileSlice emptyFileSlice = new FileSlice("000", "empty1");
    FileSlice fileSlice = new FileSlice("000", "noData1");
    fileSlice.setDataFile(new TestHoodieDataFile("/tmp/noLog.parquet"));
    fileSlice.addLogFile(new HoodieLogFile(new Path(FSUtils.makeLogFileName("noData1", ".log", "000", 1))));
    fileSlice.addLogFile(new HoodieLogFile(new Path(FSUtils.makeLogFileName("noData1", ".log", "000", 2))));
    FileSlice noLogFileSlice = new FileSlice("000", "noLog1");
    noLogFileSlice.setDataFile(new TestHoodieDataFile("/tmp/noLog.parquet"));
    FileSlice noDataFileSlice = new FileSlice("000", "noData1");
    noDataFileSlice.addLogFile(new HoodieLogFile(new Path(FSUtils.makeLogFileName("noData1", ".log", "000", 1))));
    noDataFileSlice.addLogFile(new HoodieLogFile(new Path(FSUtils.makeLogFileName("noData1", ".log", "000", 2))));
    List<FileSlice> fileSliceList = Arrays.asList(emptyFileSlice, noDataFileSlice, fileSlice, noLogFileSlice);
    List<Pair<String, FileSlice>> input = fileSliceList.stream()
        .map(f -> Pair.of(DEFAULT_PARTITION_PATHS[0], f))
        .collect(Collectors.toList());
    return Pair.of(input,
        CompactionUtils.buildFromFileSlices(input, Optional.empty(), Optional.of(metricsCaptureFn)));
}
From source file: com.teradata.tempto.internal.hadoop.hdfs.WebHDFSClient.java

private void saveFile(String path, String username, HttpEntity entity) {
    Pair<String, String> params = Pair.of("overwrite", "true");
    String writeRedirectUri = executeAndGetRedirectUri(new HttpPut(buildUri(path, username, "CREATE", params)));
    HttpPut writeRequest = new HttpPut(writeRedirectUri);
    writeRequest.setEntity(entity);
    try (CloseableHttpResponse response = httpClient.execute(writeRequest)) {
        if (response.getStatusLine().getStatusCode() != SC_CREATED) {
            throw invalidStatusException("CREATE", path, username, writeRequest, response);
        }
        long length = waitForFileSavedAndReturnLength(path, username);
        logger.debug("Saved file {} - username: {}, size: {}", path, username, byteCountToDisplaySize(length));
    } catch (IOException e) {
        throw new RuntimeException("Could not save file " + path + " in hdfs, user: " + username, e);
    }
}
From source file: com.silverpeas.notation.model.RatingDAOTest.java

@Test
public void moveRating() throws Exception {
    IDataSet actualDataSet = getActualDataSet();
    ITable table = actualDataSet.getTable("sb_notation_notation");
    int[] aimedIds = new int[] { 1, 2, 3, 7 };
    Map<Integer, Pair<String, Integer>> raterRatings = new HashMap<Integer, Pair<String, Integer>>();
    for (int id : aimedIds) {
        int index = getTableIndexForId(table, id);
        assertThat((Integer) table.getValue(index, "id"), is(id));
        assertThat((String) table.getValue(index, "instanceId"), is(INSTANCE_ID));
        assertThat((String) table.getValue(index, "externalId"), is(CONTRIBUTION_ID));
        assertThat((String) table.getValue(index, "externalType"), is(CONTRIBUTION_TYPE));
        raterRatings.put(id,
            Pair.of((String) table.getValue(index, "author"), (Integer) table.getValue(index, "note")));
    }
    long nbMoved = RatingDAO.moveRatings(getConnection(),
        new ContributionRatingPK(CONTRIBUTION_ID, INSTANCE_ID, CONTRIBUTION_TYPE), "otherInstanceId");
    actualDataSet = getActualDataSet();
    table = actualDataSet.getTable("sb_notation_notation");
    assertThat(table.getRowCount(), is(RATING_ROW_COUNT));
    assertThat(nbMoved, is((long) aimedIds.length));
    for (int id : aimedIds) {
        int index = getTableIndexForId(table, id);
        assertThat((Integer) table.getValue(index, "id"), is(id));
        assertThat((String) table.getValue(index, "instanceId"), is("otherInstanceId"));
        assertThat((String) table.getValue(index, "externalId"), is(CONTRIBUTION_ID));
        assertThat((String) table.getValue(index, "externalType"), is(CONTRIBUTION_TYPE));
        Pair<String, Integer> raterRating = raterRatings.get(id);
        assertThat((String) table.getValue(index, "author"), is(raterRating.getLeft()));
        assertThat((Integer) table.getValue(index, "note"), is(raterRating.getRight()));
    }
}
From source file: com.formkiq.core.service.workflow.WorkflowServiceImplTest.java

/**
 * Expect Start WebFlow.
 * @param f {@link FormJSON}
 * @return {@link Pair}
 * @throws IOException IOException
 */
private Pair<ModelAndView, WebFlow> expectStart(final FormJSON f) throws IOException {
    Map<String, String> errors = ImmutableMap.of("k1", "v1");
    List<String> steps = f != null ? Arrays.asList(f.getUUID(), f.getUUID()) : Collections.emptyList();
    expect(this.archiveService.get(this.folder, this.uuid, true))
        .andReturn(Pair.of(this.archive, UUID.randomUUID().toString()));
    expect(this.archive.getWorkflow()).andReturn(this.workflow);
    expect(this.workflow.getSteps()).andReturn(steps);
    if (f != null) {
        expect(this.archive.getForm(f.getUUID())).andReturn(f).times(2);
        expect(this.validatorService.validateFormJSON(this.archive, this.form,
            FormJSONRequiredType.BEFORE_SUBMIT, FormJSONRequiredType.IMMEDIATE)).andReturn(errors).times(2);
    }
    replayAll();
    return this.ws.start(this.req, WorkflowEditorServiceImpl.class, this.folder, this.uuid);
}
From source file: com.formkiq.core.service.ArchiveServiceImpl.java

@Override
public Pair<ArchiveDTO, String> get(final String folder, final String uuid, final boolean resetUUID)
        throws IOException {
    Pair<byte[], String> p = this.folderService.findFormData(folder, uuid);
    String sha1hash = p.getRight();
    ArchiveDTO archive = extractJSONFromZipFile(p.getLeft());
    // TODO remove..
    for (String formUUID : archive.getWorkflow().getSteps()) {
        if (!archive.getForms().containsKey(formUUID)) {
            byte[] d = this.folderService.findFormData(folder, formUUID).getLeft();
            ArchiveDTO fa = extractJSONFromZipFile(d);
            archive.getForms().putAll(fa.getForms());
        }
    }
    if (resetUUID) {
        resetUUID(archive);
    }
    return Pair.of(archive, sha1hash);
}
From source file: com.streamsets.pipeline.stage.processor.kv.redis.RedisLookupIT.java

@Test
@SuppressWarnings("unchecked")
public void testGetEmptyKey() throws Exception {
    RedisLookupConfig conf = new RedisLookupConfig();
    conf.cache.enabled = false;
    conf.uri = "redis://" + redis.getContainerIpAddress() + ":" + redis.getMappedPort(REDIS_PORT);
    RedisStore redisStore = new RedisStore(conf);
    LookupValue value = redisStore.get(Pair.of("", DataType.STRING));
    redisStore.close();
    assertTrue(!Optional.fromNullable((String) value.getValue()).isPresent());
}
From source file: com.act.reachables.Network.java

public Edge getEdge(Node src, Node dst) {
    return this.edgeHash.get(Pair.of(src, dst));
}
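The lookup above works because Pair defines equals and hashCode in terms of its left and right elements, so a freshly created Pair.of(src, dst) matches a structurally equal key already stored in the map. A minimal, self-contained sketch of that property; the Node and Edge types above belong to the source project, and the String keys and class name here are illustrative only:

import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.tuple.Pair;

public class PairKeyDemo {
    public static void main(String[] args) {
        Map<Pair<String, String>, String> edges = new HashMap<>();
        edges.put(Pair.of("a", "b"), "edge-ab");

        // A new Pair with equal elements hashes and compares equal to the stored key.
        System.out.println(edges.get(Pair.of("a", "b"))); // edge-ab
    }
}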
From source file: com.qwazr.webapps.example.DocumentationServlet.java

protected Pair<String, String[]> getRemoteLink(String path) {
    if (StringUtils.isEmpty(path))
        return null;
    String[] parts = StringUtils.split(path, '/');
    if (parts.length > 0) {
        path = remotePrefix + parts[0];
        int i = 0;
        for (String part : parts)
            if (i++ > 0)
                path += '/' + part;
    }
    return Pair.of(path, parts);
}
From source file: net.lldp.checksims.util.PairGeneratorTest.java

@Test
public void TestGeneratePairsWithArchiveTwoElementArchive() {
    Set<Submission> submissions = setFromElements(a, b, c);
    Set<Submission> archive = setFromElements(d, e);
    Set<Pair<Submission, Submission>> expected = setFromElements(Pair.of(a, b), Pair.of(a, c), Pair.of(b, c),
        Pair.of(a, d), Pair.of(b, d), Pair.of(c, d), Pair.of(a, e), Pair.of(b, e), Pair.of(c, e));
    Set<Pair<Submission, Submission>> results = PairGenerator.generatePairsWithArchive(submissions, archive);
    checkPairsAreInSet(results, expected);
}