Example usage for org.apache.commons.lang3.tuple.ImmutablePair: the ImmutablePair constructor

Introduction

This page collects example usages of the org.apache.commons.lang3.tuple.ImmutablePair constructor.

Prototype

public ImmutablePair(final L left, final R right) 

Document

Create a new pair instance.
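
Before the project-specific examples below, here is a minimal self-contained sketch of the constructor together with the equivalent ImmutablePair.of factory method:

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;

public class ImmutablePairDemo {
    public static void main(String[] args) {
        // Create a new pair instance via the constructor
        Pair<String, Integer> pair = new ImmutablePair<>("answer", 42);
        System.out.println(pair.getLeft());  // answer
        System.out.println(pair.getRight()); // 42

        // ImmutablePair.of(...) is an equivalent static factory
        Pair<String, Integer> same = ImmutablePair.of("answer", 42);
        System.out.println(pair.equals(same)); // true
    }
}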

Usage

From source file: com.yahoo.pulsar.common.naming.NamespaceBundleFactory.java

/**
 * Fetches {@link NamespaceBundles} from the cache for a given namespace, finds the target bundle, splits it into
 * numBundles, and returns a new {@link NamespaceBundles} containing the newly split bundles.
 *
 * @param targetBundle
 *            the {@link NamespaceBundle} that needs to be split
 * @param numBundles
 *            the number of bundles to split into
 * @return the list of split {@link NamespaceBundle}s and the {@link NamespaceBundles} that contains the final
 *         bundles, including the split bundles, for the given namespace
 * @throws Exception
 */
public Pair<NamespaceBundles, List<NamespaceBundle>> splitBundles(NamespaceBundle targetBundle, int numBundles)
        throws Exception {
    checkNotNull(targetBundle, "can't split null bundle");
    checkNotNull(targetBundle.getNamespaceObject(), "namespace must be present");
    NamespaceName nsname = targetBundle.getNamespaceObject();
    NamespaceBundles sourceBundle = bundlesCache.synchronous().get(nsname);

    final int lastIndex = sourceBundle.partitions.length - 1;

    final long[] partitions = new long[sourceBundle.partitions.length + (numBundles - 1)];
    int pos = 0;
    int splitPartition = -1;
    for (int i = 0; i < lastIndex; i++) {
        final Range<Long> range = targetBundle.getKeyRange();
        if (sourceBundle.partitions[i] == range.lowerEndpoint()
                && (range.upperEndpoint() == sourceBundle.partitions[i + 1])) {
            splitPartition = i;
            Long maxVal = sourceBundle.partitions[i + 1];
            Long minVal = sourceBundle.partitions[i];
            Long segSize = (maxVal - minVal) / numBundles;
            partitions[pos++] = minVal;
            Long curPartition = minVal + segSize;
            for (int j = 0; j < numBundles - 1; j++) {
                partitions[pos++] = curPartition;
                curPartition += segSize;
            }
        } else {
            partitions[pos++] = sourceBundle.partitions[i];
        }
    }
    partitions[pos] = sourceBundle.partitions[lastIndex];
    if (splitPartition != -1) {
        NamespaceBundles splittedNsBundles = new NamespaceBundles(nsname, partitions, this);
        List<NamespaceBundle> splittedBundles = splittedNsBundles.getBundles().subList(splitPartition,
                (splitPartition + numBundles));
        return new ImmutablePair<>(splittedNsBundles, splittedBundles);
    }
    return null;
}
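
A hedged consumption sketch for the method above; factory and bundle are hypothetical stand-ins for a NamespaceBundleFactory instance and the bundle to split:

// Hypothetical: 'factory' is a NamespaceBundleFactory, 'bundle' the NamespaceBundle to split.
Pair<NamespaceBundles, List<NamespaceBundle>> split = factory.splitBundles(bundle, 2);
if (split != null) { // null means the target bundle was not found in the source partitions
    NamespaceBundles allBundles = split.getLeft();       // full bundle set after the split
    List<NamespaceBundle> newBundles = split.getRight(); // only the newly created bundles
}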

From source file: com.deepoove.poi.resolver.TemplateResolver.java

private static void calcTagPosInParagraph(String text, List<Pair<RunEdge, RunEdge>> pairs, List<String> tags) {
    String group = null;
    int start = 0, end = 0;
    Matcher matcher = tagPattern.matcher(text);
    while (matcher.find()) {
        group = matcher.group();
        tags.add(group);
        start = text.indexOf(group, end);
        end = start + group.length();
        pairs.add(new ImmutablePair<>(new RunEdge(start, group), new RunEdge(end, group)));
    }
}
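
A worked trace may make the pairs clearer; it assumes tagPattern matches {{...}} tags, which the excerpt does not show:

// text = "Hi {{name}}!"  ->  one match: group = "{{name}}", start = 3, end = 11
// tags  : ["{{name}}"]
// pairs : [ImmutablePair(RunEdge(3, "{{name}}"), RunEdge(11, "{{name}}"))]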

From source file: com.uber.hoodie.utilities.sources.KafkaSource.java

@Override
public Pair<Optional<JavaRDD<GenericRecord>>, String> fetchNewData(Optional<String> lastCheckpointStr,
        long maxInputBytes) {

    // Obtain current metadata for the topic
    KafkaCluster cluster = new KafkaCluster(ScalaHelpers.toScalaMap(kafkaParams));
    Either<ArrayBuffer<Throwable>, Set<TopicAndPartition>> either = cluster
            .getPartitions(ScalaHelpers.toScalaSet(new HashSet<>(Arrays.asList(topicName))));
    if (either.isLeft()) {
        // Log errors and bail out
        throw new HoodieDeltaStreamerException("Error obtaining partition metadata",
                either.left().get().head());
    }
    Set<TopicAndPartition> topicPartitions = either.right().get();

    // Determine the offset ranges to read from
    HashMap<TopicAndPartition, KafkaCluster.LeaderOffset> fromOffsets;
    if (lastCheckpointStr.isPresent()) {
        fromOffsets = CheckpointUtils.strToOffsets(lastCheckpointStr.get());
    } else {
        String autoResetValue = config.getString("auto.offset.reset", Config.DEFAULT_AUTO_RESET_OFFSET);
        if (autoResetValue.equals("smallest")) {
            fromOffsets = new HashMap<>(
                    ScalaHelpers.toJavaMap(cluster.getEarliestLeaderOffsets(topicPartitions).right().get()));
        } else if (autoResetValue.equals("largest")) {
            fromOffsets = new HashMap<>(
                    ScalaHelpers.toJavaMap(cluster.getLatestLeaderOffsets(topicPartitions).right().get()));
        } else {
            throw new HoodieNotSupportedException("Auto reset value must be one of 'smallest' or 'largest' ");
        }
    }

    // Always read until the latest offset
    HashMap<TopicAndPartition, KafkaCluster.LeaderOffset> toOffsets = new HashMap<>(
            ScalaHelpers.toJavaMap(cluster.getLatestLeaderOffsets(topicPartitions).right().get()));

    // Come up with final set of OffsetRanges to read (account for new partitions)
    // TODO(vc): Respect maxInputBytes, by estimating number of messages to read each batch from partition size
    OffsetRange[] offsetRanges = CheckpointUtils.computeOffsetRanges(fromOffsets, toOffsets);
    long totalNewMsgs = CheckpointUtils.totalNewMessages(offsetRanges);
    if (totalNewMsgs <= 0) {
        return new ImmutablePair<>(Optional.empty(), lastCheckpointStr.isPresent() ? lastCheckpointStr.get()
                : CheckpointUtils.offsetsToStr(toOffsets));
    } else {
        log.info("About to read " + totalNewMsgs + " from Kafka for topic :" + topicName);
    }

    // Perform the actual read from Kafka
    JavaRDD<byte[]> kafkaRDD = KafkaUtils.createRDD(sparkContext, byte[].class, byte[].class,
            DefaultDecoder.class, DefaultDecoder.class, kafkaParams, offsetRanges).values();

    // Produce an RDD[GenericRecord]
    final AvroConvertor avroConvertor = new AvroConvertor(schemaProvider.getSourceSchema().toString());
    JavaRDD<GenericRecord> newDataRDD;
    if (dataFormat == SourceDataFormat.AVRO) {
        newDataRDD = kafkaRDD.map(bytes -> avroConvertor.fromAvroBinary(bytes));
    } else if (dataFormat == SourceDataFormat.JSON) {
        newDataRDD = kafkaRDD.map(bytes -> avroConvertor.fromJson(new String(bytes, Charset.forName("utf-8"))));
    } else {
        throw new HoodieNotSupportedException("Unsupport data format :" + dataFormat);
    }

    return new ImmutablePair<>(Optional.of(newDataRDD), CheckpointUtils.offsetsToStr(toOffsets));
}
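
A minimal consumption sketch, assuming a constructed KafkaSource named source: the left element is empty when there are no new messages, while the right element always carries the checkpoint string to persist.

Pair<Optional<JavaRDD<GenericRecord>>, String> fetched =
        source.fetchNewData(Optional.empty(), Long.MAX_VALUE);
String checkpointToSave = fetched.getRight();
fetched.getLeft().ifPresent(rdd -> System.out.println("Read " + rdd.count() + " records"));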

From source file: com.jkoolcloud.tnt4j.streams.utils.TimestampFormatter.java

/**
 * Scales a decimal timestamp value and its value units to preserve the fractional part of the value.
 * <p>
 * Scaling is performed until the numeric value has little (epsilon is {@code 0.001}) or no fractional part, or
 * {@code units} gets set to {@link TimeUnit#NANOSECONDS}.
 *
 * @param dTime
 *            numeric timestamp value to scale
 * @param units
 *            timestamp value units
 * @return pair of scaled timestamp value and units
 */
public static Pair<Double, TimeUnit> scale(double dTime, TimeUnit units) {
    double fraction = dTime % 1;

    if (!Utils.equals(fraction, 0.0, 0.001)) {
        switch (units) {
        case DAYS:
            dTime = dTime * 24L;
            break;
        case HOURS:
        case MINUTES:
            dTime = dTime * 60L;
            break;
        case SECONDS:
        case MILLISECONDS:
        case MICROSECONDS:
            dTime = dTime * 1000L;
            break;
        case NANOSECONDS:
        default:
            dTime = Math.round(dTime);
            break;
        }

        units = shiftDown(units);

        return scale(dTime, units);
    }

    return new ImmutablePair<>(dTime, units);
}
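
A worked example of the recursion, assuming shiftDown steps one unit finer (SECONDS to MILLISECONDS):

Pair<Double, TimeUnit> scaled = TimestampFormatter.scale(1.5, TimeUnit.SECONDS);
// 1.5 has fraction 0.5, so the value is multiplied by 1000 and the units shift down;
// 1500.0 has no fraction, so the recursion stops:
// scaled.getLeft()  == 1500.0
// scaled.getRight() == TimeUnit.MILLISECONDS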

From source file: it.polimi.diceH2020.SPACE4CloudWS.solvers.solversImpl.QNSolver.QNSolver.java

private Pair<List<File>, List<File>> generateQNModel(@NonNull SolutionPerJob solPerJob) throws IOException {
    List<File> replayerFiles = retrieveInputFiles(solPerJob, ".txt");
    Integer nContainers = solPerJob.getNumberContainers();
    Integer concurrency = solPerJob.getNumberUsers();
    Double think = solPerJob.getJob().getThink();

    QueueingNetworkModel model = ((QNSettings) connSettings).getModel();
    int nMR = (int) solPerJob.getProfile().getProfileMap().keySet().stream().filter(s -> {
        Matcher m = patternNMR.matcher(s);
        return m.matches();
    }).count();
    if (nMR > 2) { //TODO verify
        model = QueueingNetworkModel.Q1;
        logger.debug("QN model set to Q1");
    }

    Map<String, String> inputFilesSet = new HashMap<>();
    for (File file : replayerFiles) {
        String name = file.getName();
        Matcher mapMatcher = patternMap.matcher(name);
        Matcher rsMatcher = patternRS.matcher(name);

        String stringToBeReplaced = "";
        if (mapMatcher.find()) {
            stringToBeReplaced = mapMatcher.group(2).toUpperCase();
        } else if (rsMatcher.find()) {
            stringToBeReplaced = rsMatcher.group(2).toUpperCase();
        } else {
            logger.error("Replayer file name does not match the required regex");
        }

        logger.debug("Pattern to replace in jsimg: " + stringToBeReplaced);
        inputFilesSet.put(stringToBeReplaced,
                retrieveRemoteSubDirectory(solPerJob) + File.separator + file.getName());
    }

    Map<String, String> numMR = new HashMap<>();

    for (Entry<String, Double> entry : solPerJob.getProfile().getProfileMap().entrySet()) {
        Matcher m = patternNMR.matcher(entry.getKey());
        if (m.matches()) {
            numMR.put(entry.getKey().toUpperCase(), String.valueOf(entry.getValue().intValue()));
        }
    }

    String jsimgfileContent = new QNFileBuilder().setQueueingNetworkModel(model).setCores(nContainers)
            .setConcurrency(concurrency).setReplayersInputFiles(inputFilesSet).setNumMR(numMR)
            .setThinkRate(1 / think).setAccuracy(connSettings.getAccuracy() / 100)
            .setSignificance(((QNSettings) connSettings).getSignificance()).build();

    File jsimgTempFile = fileUtility.provideTemporaryFile(buildPrefix(solPerJob), ".jsimg");
    fileUtility.writeContentToFile(jsimgfileContent, jsimgTempFile);

    List<File> jmtModel = new ArrayList<>(1);
    jmtModel.add(jsimgTempFile);
    return new ImmutablePair<>(jmtModel, replayerFiles);
}

From source file: fredboat.audio.GuildPlayer.java

private Pair<Boolean, String> canMemberSkipTracks(Member member, List<AudioTrackContext> list) {
    if (PermissionUtil.checkPermission(getGuild(), member, Permission.MESSAGE_MANAGE)) {
        return new ImmutablePair<>(true, null);
    } else {
        //We are not a mod
        int otherPeoplesTracks = 0;

        for (AudioTrackContext atc : list) {
            if (!atc.getMember().equals(member))
                otherPeoplesTracks++;
        }

        if (otherPeoplesTracks > 1) {
            return new ImmutablePair<>(false, I18n.get(getGuild()).getString("skipDeniedTooManyTracks"));
        } else {
            return new ImmutablePair<>(true, null);
        }
    }
}
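
The pair doubles as a verdict plus an optional denial message; a hedged sketch of a caller inside the same class, where channel is a hypothetical message target:

Pair<Boolean, String> verdict = canMemberSkipTracks(member, tracksToSkip);
if (!verdict.getLeft()) {
    // the right side is non-null only when skipping is denied
    channel.sendMessage(verdict.getRight());
}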

From source file: io.seldon.recommendation.RecommendationPeer.java

public ImmutablePair<RecommendationResult, RecResultContext> getRecommendations(long user, String client,
        String clientUserId, Integer type, Set<Integer> dimensions, int numRecommendationsAsked,
        String lastRecListUUID, Long currentItemId, String referrer, String recTag,
        List<String> algorithmOverride, Set<Long> scoreItems) {
    ClientStrategy strategy;
    if (algorithmOverride != null && !algorithmOverride.isEmpty()) {
        logger.debug("Overriding algorithms from JS");
        strategy = algStore.retrieveStrategy(client, algorithmOverride);
    } else {
        strategy = algStore.retrieveStrategy(client);
    }

    if (strategy == null) {
        throw new APIException(APIException.NOT_VALID_STRATEGY);
    }

    //Set base values - will be used for anonymous users
    int numRecommendations = numRecommendationsAsked;
    int numRecentActions = 0;

    Double diversityLevel = strategy.getDiversityLevel(clientUserId, recTag);
    if (diversityLevel > 1.0f) {
        int numRecommendationsDiverse = (int) Math.round(numRecommendationsAsked * diversityLevel);
        if (debugging)
            logger.debug(
                    "Updated num recommendations as for client " + client + " diversity is " + diversityLevel
                            + " was " + numRecommendationsAsked + " will now be " + numRecommendationsDiverse);
        numRecommendations = numRecommendationsDiverse;
    } else
        numRecommendations = numRecommendationsAsked;

    List<Long> recentActions = null;
    if (user != Constants.ANONYMOUS_USER) // recent actions can only be fetched for a non-anonymous user
    {
        //TODO - fix limit
        recentActions = actionProvider.getRecentActions(client, user, 100);
        numRecentActions = recentActions.size();
        if (debugging)
            logger.debug("RecentActions for user with client " + client + " internal user id " + user + " num."
                    + numRecentActions);
    } else if (debugging)
        logger.debug("Can't get recent actions for anonymous user " + clientUserId);

    Map<Long, Double> recommenderScores = new HashMap<>();
    List<String> algsUsed = new ArrayList<>();
    List<RecResultContext> resultSets = new ArrayList<>();
    AlgorithmResultsCombiner combiner = strategy.getAlgorithmResultsCombiner(clientUserId, recTag);
    for (AlgorithmStrategy algStr : strategy.getAlgorithms(clientUserId, recTag)) {
        long startTime = System.currentTimeMillis();
        if (logger.isDebugEnabled())
            logger.debug("Using recommender class " + algStr.name);

        List<Long> recentItemInteractions;
        // add items from recent history if there are any and algorithm options says to use them
        if (recentActions != null && recentActions.size() > 0)
            recentItemInteractions = new ArrayList<>(recentActions);
        else
            recentItemInteractions = new ArrayList<>();

        // add current item id if not in recent actions
        if (currentItemId != null && !recentItemInteractions.contains(currentItemId))
            recentItemInteractions.add(0, currentItemId);
        FilteredItems explicitItems = null;
        if (scoreItems != null)
            explicitItems = explicitItemsIncluder.create(client, scoreItems);
        RecommendationContext ctxt = RecommendationContext.buildContext(client, algStr, user, clientUserId,
                currentItemId, dimensions, lastRecListUUID, numRecommendations, defaultOptions, explicitItems);
        ItemRecommendationResultSet results = algStr.algorithm.recommend(client, user, dimensions,
                numRecommendations, ctxt, recentItemInteractions);
        if (logger.isDebugEnabled())
            logger.debug("Recommender " + algStr.name + " returned " + results.getResults().size()
                    + " results, took " + (System.currentTimeMillis() - startTime) + "ms");
        resultSets.add(new RecResultContext(results, results.getRecommenderName()));
        if (combiner.isEnoughResults(numRecommendationsAsked, resultSets))
            break;
    }
    RecResultContext combinedResults = combiner.combine(numRecommendations, resultSets);
    if (logger.isDebugEnabled())
        logger.debug("After combining, we have " + combinedResults.resultSet.getResults().size()
                + " results with alg key " + combinedResults.algKey + " : "
                + StringUtils.join(combinedResults.resultSet.getResults(), ':'));
    for (ItemRecommendationResultSet.ItemRecommendationResult result : combinedResults.resultSet.getResults()) {
        recommenderScores.put(result.item, result.score.doubleValue());
    }
    if (recommenderScores.size() > 0) {
        //         switch(options.getPostprocessing())
        //         {
        //            case REORDER_BY_POPULARITY:
        //            {
        //               IBaselineRecommenderUtils baselineUtils = new SqlBaselineRecommenderUtils(options.getName());
        //               BaselineRecommender br = new BaselineRecommender(options.getName(), baselineUtils);
        //               recommenderScores = br.reorderRecommendationsByPopularity(recommenderScores);
        //            }
        //            break;
        //            default:
        //               break;
        //         }
        List<Long> recommendationsFinal = CollectionTools.sortMapAndLimitToList(recommenderScores,
                numRecommendations, true);
        if (logger.isDebugEnabled())
            logger.debug("recommendationsFinal size was " + recommendationsFinal.size());

        final RecommendationResult recommendationResult = createFinalRecResult(numRecommendationsAsked, client,
                clientUserId, dimensions, lastRecListUUID, recommendationsFinal, combinedResults.algKey,
                currentItemId, numRecentActions, diversityLevel, strategy, recTag);
        final ImmutablePair<RecommendationResult, RecResultContext> retVal = new ImmutablePair<>(
                recommendationResult, combinedResults);
        return retVal;
    } else {
        logger.warn("Returning no recommendations for user with client id " + clientUserId);
        final RecommendationResult recommendationResult = createFinalRecResult(numRecommendationsAsked, client,
                clientUserId, dimensions, lastRecListUUID, new ArrayList<Long>(), "", currentItemId,
                numRecentActions, diversityLevel, strategy, recTag);
        final ImmutablePair<RecommendationResult, RecResultContext> retVal = new ImmutablePair<>(
                recommendationResult, combinedResults);
        return retVal;
    }
}

From source file: com.epam.catgenome.manager.TrackHelper.java

/**
 * Fills a track with blocks created by the createNewBlockFunction function
 * @param track a Track to fill
 * @param createNewBlockFunction a function that generates blocks
 * @param <T> the track's block type
 */
public static <T extends Block> void fillBlocks(final Track<T> track,
        final Function<Pair<Integer, Integer>, T> createNewBlockFunction) {
    final List<T> list = new ArrayList<>();
    final double scaleFactor = track.getScaleFactor();
    final int endIndex = track.getEndIndex();
    final int step = (int) Math.max(1, Math.round(1.0 / scaleFactor));
    int start = track.getStartIndex();
    int helpEnd = start + step - 1;
    while (helpEnd <= endIndex) {
        list.add(createNewBlockFunction.apply(new ImmutablePair<>(start, helpEnd)));
        start = helpEnd + 1;
        helpEnd += step;
    }
    if (start <= endIndex) {
        list.add(createNewBlockFunction.apply(new ImmutablePair<>(start, endIndex)));
    }
    track.setBlocks(list);
}
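
A hedged usage sketch; Wig stands in for any Block subclass with the usual start/end setters, which the excerpt does not show:

// For a track spanning 1..100 with scaleFactor 0.1, step is 10 and the factory
// receives bounds (1,10), (11,20), ..., (91,100).
TrackHelper.fillBlocks(track, bounds -> {
    Wig block = new Wig();                  // hypothetical Block subclass
    block.setStartIndex(bounds.getLeft());
    block.setEndIndex(bounds.getRight());
    return block;
});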

From source file: com.addthis.hydra.kafka.consumer.ConsumerUtils.java

public static Pair<ConsumerConnector, Map<String, List<KafkaStream<Bundle, Bundle>>>> newBundleStreams(
        String zookeeper, Map<String, Integer> topicStreams, Map<String, String> overrides) {
    ConsumerConnector connector = kafka.consumer.Consumer
            .createJavaConsumerConnector(newConsumerConfig(zookeeper, overrides));
    return new ImmutablePair<>(connector,
            connector.createMessageStreams(topicStreams, new BundleDecoder(), new BundleDecoder()));
}

From source file: io.lavagna.web.api.CardControllerTest.java

@Test
public void createWithDueDate() {
    CardData cardData = new CardData();
    cardData.setName("name");

    BulkOperation dueDate = new BulkOperation(1, new CardLabelValue.LabelValue(new Date()),
            Collections.<Integer>emptyList());

    cardData.setDueDate(dueDate);

    ImmutablePair<List<Integer>, List<Integer>> result = new ImmutablePair<>(cardIds, cardIds);

    when(cardService.createCard(eq("name"), eq(columnId), any(Date.class), eq(user))).thenReturn(card);
    when(bulkOperationService.setDueDate(eq(projectShortName), eq(cardIds),
            any(CardLabelValue.LabelValue.class), eq(user))).thenReturn(result);

    cardController.create(columnId, cardData, user);

    verify(cardService).createCard(eq("name"), eq(columnId), any(Date.class), eq(user));
    verify(eventEmitter).emitCreateCard(project.getShortName(), board.getShortName(), boardColumn.getId(), card,
            user);
    verify(bulkOperationService).setDueDate(eq(projectShortName), eq(cardIds),
            any(CardLabelValue.LabelValue.class), eq(user));

    verify(cardDataService, never()).updateDescription(eq(cardId), anyString(), any(Date.class), eq(userId));
    verify(bulkOperationService, never()).addUserLabel(eq(projectShortName), anyInt(),
            any(CardLabelValue.LabelValue.class), ArgumentMatchers.<Integer>anyList(), eq(user));
    verify(bulkOperationService, never()).setMilestone(eq(projectShortName),
            ArgumentMatchers.<Integer>anyList(), any(CardLabelValue.LabelValue.class), eq(user));
    verify(bulkOperationService, never()).assign(eq(projectShortName), ArgumentMatchers.<Integer>anyList(),
            any(CardLabelValue.LabelValue.class), eq(user));
    verify(cardDataService, never()).assignFileToCard(anyString(), anyString(), anyInt(), eq(user),
            any(Date.class));
}