Example usage for org.apache.commons.lang3.tuple Pair getRight

Introduction

On this page you can find example usages of org.apache.commons.lang3.tuple.Pair.getRight().

Prototype

public abstract R getRight();

Document

Gets the right element from this pair.

When treated as a key-value pair, this is the value.
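
A minimal, self-contained sketch may help before the real-world examples below (the class name PairGetRightDemo is invented for illustration):

import org.apache.commons.lang3.tuple.Pair;

public class PairGetRightDemo {
    public static void main(String[] args) {
        // Pair.of(...) builds an immutable pair; getRight() returns its second element.
        Pair<String, Integer> entry = Pair.of("answer", 42);

        System.out.println(entry.getLeft());  // answer
        System.out.println(entry.getRight()); // 42

        // Pair implements Map.Entry, so getValue() returns the same element as getRight().
        System.out.println(entry.getValue()); // 42
    }
}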

Usage

From source file:com.qwazr.utils.json.DirectoryJsonManager.java
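
This cache lookup stores a file's last-modified timestamp and its parsed value together in a Pair<Long, T>; getLeft() checks the timestamp and getRight() returns the cached instance.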

private T getNoLock(File file, String name, AtomicBoolean mustBeEvaluated) throws IOException {
    Pair<Long, T> item = instancesCache.get(name);
    long lastModified = file.lastModified();
    if (file.exists()) {
        if (item != null && item.getLeft() == lastModified)
            return item.getRight();
        if (mustBeEvaluated == null) {
            item = loadItem(name, file, lastModified);
            buildCache();
            return item.getRight();
        }
    } else {
        if (item == null)
            return null;
        if (mustBeEvaluated == null) {
            instancesMap.remove(name);
            buildCache();
            return null;
        }
    }
    mustBeEvaluated.set(true);
    return null;
}

From source file:com.formkiq.core.service.ArchiveServiceImpl.java
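
Here findFormData() returns a Pair<byte[], String> of raw form data and its SHA-1 hash; getRight() extracts the hash while getLeft() feeds the ZIP extraction.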

@Override
public Pair<ArchiveDTO, String> get(final String folder, final String uuid, final boolean resetUUID)
        throws IOException {

    Pair<byte[], String> p = this.folderService.findFormData(folder, uuid);
    String sha1hash = p.getRight();

    ArchiveDTO archive = extractJSONFromZipFile(p.getLeft());

    // TODO remove..
    for (String formUUID : archive.getWorkflow().getSteps()) {
        if (!archive.getForms().containsKey(formUUID)) {

            byte[] d = this.folderService.findFormData(folder, formUUID).getLeft();

            ArchiveDTO fa = extractJSONFromZipFile(d);
            archive.getForms().putAll(fa.getForms());
        }
    }

    if (resetUUID) {
        resetUUID(archive);
    }

    return Pair.of(archive, sha1hash);
}

From source file:com.github.steveash.jg2p.align.AlignerTrainer.java
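
In this EM-style maximization step, each Pair<String, String> is a cell of a probability table; getLeft() and getRight() unpack its x and y coordinates.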

private double maximization() {
    smoothCounts();
    ProbTable.Marginals marginals = counts.calculateMarginals();
    double totalChange = 0;
    double unsuperFactor = (1.0 - trainOpts.semiSupervisedFactor);
    double superFactor = trainOpts.semiSupervisedFactor;

    for (Pair<String, String> xy : ProbTable.unionOfAllCells(counts, labelledProbs)) {
        String x = xy.getLeft();
        String y = xy.getRight();
        double countExp = counts.prob(x, y);
        double unsupervised = trainOpts.maximizer.maximize(immutableCell(x, y, countExp), marginals);
        double supervised = labelledProbs.prob(x, y);
        double update = (unsuperFactor * unsupervised) + (superFactor * supervised);
        assertProb(update);

        double current = probs.prob(x, y);
        totalChange += Math.abs(current - update);
        probs.setProb(x, y, update);
    }

    counts.clear();
    return trainOpts.maximizer.normalize(totalChange, marginals);
}

From source file:com.jaspersoft.jasperserver.jrsh.operation.grammar.parser.PlainGrammarParser.java
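
While wiring up a dependency graph, getRight() yields each token's array of dependency names and getLeft() the token itself.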

protected void buildGraphEdges() {
    for (val entry : dependencies.entrySet()) {
        Pair<Token, String[]> tokenPair = entry.getValue();
        for (String dependencyName : tokenPair.getRight()) {
            Pair<Token, String[]> dependency = dependencies.get(dependencyName);
            graph.addEdge(dependency.getLeft(), tokenPair.getLeft());
        }
    }
}

From source file:com.act.biointerpretation.desalting.DesalterTest.java
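
In this unit test, each test case pairs an input InChI (getLeft()) with the expected map of desalted fragments and their counts (getRight()).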

@Test
public void testDesaltingDetectsAndCountsRepeatedFragments() throws Exception {
    List<Pair<String, Map<String, Integer>>> testCases = new ArrayList<Pair<String, Map<String, Integer>>>() {
        {
            add(Pair.of( // Phenanthroline!
                    "InChI=1S/2C12H8N2.2ClH.Ru/c2*1-3-9-5-6-10-4-2-8-14-12(10)11(9)13-7-1;;;/h2*1-8H;2*1H;/q2*-2;;;+9",
                    new HashMap<String, Integer>() {
                        {
                            put("InChI=1S/C12H8N2/c1-3-9-5-6-10-4-2-8-14-12(10)11(9)13-7-1/h1-8H/q-2", 2);
                        }
                    }));
            add(Pair.of( // Cyanide!
                    "InChI=1S/12CN.2Fe.2H/c12*1-2;;;;/q12*-1;+2;+3;;", new HashMap<String, Integer>() {
                        {
                            put("InChI=1S/CN/c1-2/q-1", 12);
                        }
                    }));
            add(Pair.of( // Bipyridine!
                    "InChI=1S/2C10H10N2.2ClH.Ru/c2*1-3-7-11-9(5-1)10-6-2-4-8-12-10;;;/h2*1-10H;2*1H;/q2*-2;;;+8/p-2",
                    new HashMap<String, Integer>() {
                        {
                            put("InChI=1S/C10H10N2/c1-3-7-11-9(5-1)10-6-2-4-8-12-10/h1-10H/q-2", 2);
                        }
                    }));
            add(Pair.of( // Cyclopentadiene!
                    "InChI=1S/2C5H5.F6P.Fe/c2*1-2-4-5-3-1;1-7(2,3,4,5)6;/h2*1-5H;;/q3*-1;+3",
                    new HashMap<String, Integer>() {
                        {
                            put("InChI=1S/C5H5/c1-2-4-5-3-1/h1-5H/q-1", 2);
                        }
                    }));
            add(Pair.of( // Citrate!  (Bonus: multiple copper ions.)
                    "InChI=1S/2C6H8O7.3Cu/c2*7-3(8)1-6(13,5(11)12)2-4(9)10;;;/h2*13H,1-2H2,(H,7,8)(H,9,10)(H,11,12);;;/q;;3*+2/p-6",
                    new HashMap<String, Integer>() {
                        {
                            put("InChI=1S/C6H8O7/c7-3(8)1-6(13,5(11)12)2-4(9)10/h13H,1-2H2,(H,7,8)(H,9,10)(H,11,12)",
                                    2);
                        }
                    }));
        }
    };

    Desalter desalter = new Desalter(new ReactionProjector());
    desalter.initReactors();

    for (Pair<String, Map<String, Integer>> testCase : testCases) {
        String inchi = testCase.getLeft();
        Map<String, Integer> expectedFragmentCounts = testCase.getRight();
        Map<String, Integer> actual = desalter.desaltInchi(inchi);
        assertEquals(String.format("Fragments and counts match for %s", inchi), expectedFragmentCounts, actual);
    }
}

From source file:com.pinterest.terrapin.hadoop.BaseUploader.java
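
During upload, each file is tracked as a Pair<Path, Long>; getRight() reads the file size to find the largest shard, and getLeft() collects the source paths for DistCp.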

public void upload(String clusterName, String fileSet, Options options) throws Exception {
    List<Pair<Path, Long>> fileSizePairList = getFileList();

    int numShards = fileSizePairList.size();
    LOG.info("Got " + numShards + " files.");
    if (numShards == 0) {
        LOG.warn("No files found. Exiting.");
        System.exit(1);
    }

    List<Path> parts = Lists.transform(fileSizePairList, new Function<Pair<Path, Long>, Path>() {
        @Override
        public Path apply(Pair<Path, Long> pathLongPair) {
            return pathLongPair.getKey();
        }
    });
    PartitionerType partitionerType = options.getPartitioner();

    validate(parts, partitionerType, numShards);
    long maxSize = -1;
    for (Pair<Path, Long> fileSizePair : fileSizePairList) {
        long size = fileSizePair.getRight();
        if (maxSize < size) {
            maxSize = size;
        }
    }
    // Come up with a new timestamp epoch for the latest data.
    long timestampEpochMillis = System.currentTimeMillis();
    String hdfsDir = Constants.HDFS_DATA_DIR + "/" + fileSet + "/" + timestampEpochMillis;
    ZooKeeperManager zkManager = getZKManager(clusterName);
    FileSetInfo fileSetInfo = new FileSetInfo(fileSet, hdfsDir, numShards, (List) Lists.newArrayList(),
            options);

    int replicationFactor = Constants.DEFAULT_HDFS_REPLICATION;
    if (terrapinNamenode == null || terrapinNamenode.isEmpty()) {
        ClusterInfo info = zkManager.getClusterInfo();
        if (info == null) {
            LOG.error("Could not find the namenode for " + clusterName);
            System.exit(1);
        }
        if (info.hdfsNameNode == null || info.hdfsNameNode.isEmpty()) {
            LOG.error("Could not find the namenode for " + clusterName);
            System.exit(1);
        }
        this.terrapinNamenode = info.hdfsNameNode;
        replicationFactor = info.hdfsReplicationFactor;
    }
    // Connect to the zookeeper and establish a lock on the fileset.
    LOG.info("Locking fileset " + fileSet);
    zkManager.lockFileSet(fileSet, fileSetInfo);

    try {
        LOG.info("Uploading " + numShards + " files through distcp to " + hdfsDir);

        // TODO: Add check for cluster disk space.
        List<Path> sourceFiles = Lists.newArrayListWithCapacity(fileSizePairList.size());
        for (Pair<Path, Long> fileSize : fileSizePairList) {
            sourceFiles.add(fileSize.getLeft());
        }
        if (sourceFiles.size() == 1) {
            hdfsDir = hdfsDir + "/" + TerrapinUtil.formatPartitionName(0);
        }
        DistCpOptions distCpOptions = new DistCpOptions(sourceFiles,
                new Path("hdfs", terrapinNamenode, hdfsDir));
        distCpOptions.setSyncFolder(true);
        distCpOptions.setSkipCRC(true);

        if (maxSize > Constants.DEFAULT_MAX_SHARD_SIZE_BYTES) {
            LOG.warn("Largest shard is " + maxSize + " bytes. This is more than 4G. "
                    + "Increase the # of shards to reduce the size.");
            System.exit(1);
        }
        TerrapinUtil.setupConfiguration(conf, maxSize, replicationFactor);

        DistCp distCp = getDistCp(conf, distCpOptions);
        Job job = distCp.execute();
        if (!job.waitForCompletion(true)) {
            throw new RuntimeException("Distributed copy failed.");
        }

        LOG.info("Successfully copied data.");

        loadFileSetData(zkManager, fileSetInfo, options);

        // Wait for a while so that zookeeper watches have propagated before relinquishing the lock.
        try {
            LOG.info("Releasing file set lock.");
            Thread.sleep(5000);
        } catch (InterruptedException ie) {
            LOG.warn("Interrupted.");
        }
    } finally {
        zkManager.unlockFileSet(fileSet);
    }
}

From source file:net.malisis.blocks.renderer.MixedBlockRenderer.java
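
When rendering a mixed block as an item, the two combined block states are read from NBT as a Pair; getLeft() and getRight() recover the first and second state.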

private boolean setup() {
    if (renderType == RenderType.ITEM) {
        if (!itemStack.hasTagCompound())
            return false;

        Pair<IBlockState, IBlockState> pair = MixedBlockBlockItem.readNBT(itemStack.getTagCompound());
        state1 = pair.getLeft();
        state2 = pair.getRight();

        mixedBlockState = ((MixedBlock) block).getDefaultState().withProperty(DirectionalComponent.ALL,
                EnumFacing.SOUTH);
    } else if (renderType == RenderType.BLOCK) {
        tileEntity = (MixedBlockTileEntity) super.tileEntity;
        if (tileEntity == null)
            return false;
        state1 = tileEntity.getState1();
        state2 = tileEntity.getState2();

        mixedBlockState = blockState;
    }

    if (state1 == null || state2 == null)
        return false;
    return true;
}

From source file:alfio.controller.api.admin.AdminReservationApiController.java
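
This REST endpoint receives the matching reservations and their total count as a Pair; getLeft() and getRight() split it into a paged response.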

@RequestMapping(value = "/event/{eventName}/reservations/list", method = RequestMethod.GET)
public PageAndContent<List<TicketReservation>> findAll(@PathVariable("eventName") String eventName,
        @RequestParam(value = "page", required = false) Integer page,
        @RequestParam(value = "search", required = false) String search,
        @RequestParam(value = "status", required = false) List<TicketReservation.TicketReservationStatus> status,
        Principal principal) {
    Event event = eventRepository.findByShortName(eventName);
    eventManager.checkOwnership(event, principal.getName(), event.getOrganizationId());
    Pair<List<TicketReservation>, Integer> res = ticketReservationManager
            .findAllReservationsInEvent(event.getId(), page, search, status);
    return new PageAndContent<>(res.getLeft(), res.getRight());
}

From source file:eu.stratosphere.nephele.streaming.taskmanager.chaining.TaskChainer.java
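
Splitting an overloaded task chain yields a Pair of two new chains; getLeft() replaces the original and getRight() is inserted immediately after it.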

/**
 * Searches for chains that continually use up more than one CPU core and
 * splits each of them into two chains in the background.
 */
private void splitChainsIfNecessary() {

    int currChainIndex = 0;
    while (currChainIndex < this.chains.size()) {
        TaskChain chain = this.chains.get(currChainIndex);

        if (chain.hasCPUUtilizationMeasurements() && chain.getCPUUtilization() > 99
                && chain.getNumberOfChainedTasks() > 1) {

            Pair<TaskChain, TaskChain> splitResult = splitChain(chain);
            this.chains.set(currChainIndex, splitResult.getLeft());
            this.chains.add(currChainIndex + 1, splitResult.getRight());
            currChainIndex = currChainIndex + 2;
        } else {
            currChainIndex++;
        }
    }
}

From source file:io.cloudslang.lang.compiler.parser.MetadataParser.java
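
Here a description line is parsed into a Pair of variable name and text; getLeft() and getRight() feed both into the description builder.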

private void handleDescriptionLineVariableDeclarationOnlySyntax(DescriptionBuilder descriptionBuilder,
        String currentLine) {
    // if description is opened
    if (descriptionBuilder.descriptionOpened()) {
        // add
        Pair<String, String> data = descriptionPatternMatcher
                .getDescriptionVariableLineDataDeclarationOnly(currentLine);
        descriptionBuilder.addToDescription(data.getLeft(), data.getRight());
    }
    // otherwise ignore
}