Example usage for com.google.common.collect TreeBasedTable create

List of usage examples for com.google.common.collect TreeBasedTable create

Introduction

This page lists example usages of com.google.common.collect TreeBasedTable.create.

Prototype

public static <R, C, V> TreeBasedTable<R, C, V> create(Comparator<? super R> rowComparator,
        Comparator<? super C> columnComparator) 

Source Link

Document

Creates an empty TreeBasedTable that is ordered by the specified comparators.

Usage

From source file:com.publictransitanalytics.scoregenerator.testhelpers.PreloadedStopTimesDirectory.java

/**
 * Builds the two lookup tables from the given trip stops: one keyed by trip id
 * and transit time, the other keyed by stop id and transit time.
 *
 * @param tripStops the stops to index
 */
public PreloadedStopTimesDirectory(final Set<TripStop> tripStops) {
    // Rows ordered by the string form of the trip id, columns by natural time order.
    tripTable = TreeBasedTable.create(
            (TripId first, TripId second) -> first.toString().compareTo(second.toString()),
            (TransitTime first, TransitTime second) -> first.compareTo(second));
    stopTable = TreeBasedTable.create();
    for (final TripStop stop : tripStops) {
        tripTable.put(stop.getTripId(), stop.getTime(), stop);
        stopTable.put(stop.getStopId(), stop.getTime(), stop);
    }
}

From source file:com.publictransitanalytics.scoregenerator.schedule.TripCreatingTransitNetwork.java

/**
 * Indexes every scheduled vehicle event of the given trips into a table of
 * (stop, time key) -> entry point.
 *
 * NOTE(review): the method name looks like a typo for "makeEntryPointTable" —
 * left unchanged to avoid breaking callers elsewhere in the file.
 *
 * @param trips the trips whose schedules are indexed
 * @return a sorted table keyed by stop identifier and scheduled time
 */
private static TreeBasedTable<TransitStop, EntryPointTimeKey, EntryPoint> makeEntyPointTable(
        final Set<Trip> trips) {
    // Stops ordered by identifier, time keys by their natural comparison.
    final TreeBasedTable<TransitStop, EntryPointTimeKey, EntryPoint> table = TreeBasedTable.create(
            (first, second) -> first.getIdentifier().compareTo(second.getIdentifier()),
            (first, second) -> first.compareTo(second));
    for (final Trip trip : trips) {
        final List<VehicleEvent> schedule = trip.getSchedule();
        // The sequence index within the schedule is part of the entry point.
        for (int sequence = 0; sequence < schedule.size(); sequence++) {
            final VehicleEvent event = schedule.get(sequence);
            final LocalDateTime scheduledTime = event.getScheduledTime();
            table.put(event.getLocation(), new EntryPointTimeKey(scheduledTime),
                    new EntryPoint(trip, scheduledTime, sequence));
        }
    }
    return table;
}

From source file:de.tudarmstadt.ukp.dkpro.keyphrases.bookindexing.aggregation.RankedPhraseAggregationAnnotator.java

/**
 * Iterates through {@link Segment}s and aggregates all {@link Keyphrase}s as
 * specified in the concrete {@link AggregationStrategy}.
 *
 * @param jcas/*from  w  w  w.j a v  a2s .  c o m*/
 * @return table representing phrases contained in segments
 * @throws AnalysisEngineProcessException
 */
private Table<String, Integer, Double> getTable(JCas jcas) throws AnalysisEngineProcessException {
    Table<String, Integer, Double> phraseSegmentTable = TreeBasedTable.create(new Comparator<String>() {
        @Override
        public int compare(String o1, String o2) {
            return o1.compareTo(o2);
        }
    }, new Comparator<Integer>() {
        @Override
        public int compare(Integer o1, Integer o2) {
            return o1.compareTo(o2);
        }
    });

    // there have to be segments in the JCas
    assert jcas.getAnnotationIndex(Segment.type).size() > 0;

    if (getContext().getLogger().isLoggable(Level.INFO)) {
        getContext().getLogger().log(Level.INFO,
                String.format("Found %d segments", jcas.getAnnotationIndex(Segment.type).size()));
    }

    // iterate through all segments and add all phrases with their score to
    // the table
    int segmentNr = 0;
    for (Segment segment : JCasUtil.select(jcas, Segment.class)) {

        // aggregate all (keyphrase, segment) -> score mappings in a table
        for (Keyphrase keyphrase : JCasUtil.selectCovered(Keyphrase.class, segment)) {

            String phrase = convertToLowercase ? keyphrase.getKeyphrase().toLowerCase()
                    : keyphrase.getKeyphrase();

            if (getContext().getLogger().isLoggable(Level.FINEST)) {
                getContext().getLogger().log(Level.FINEST, String.format(Locale.US,
                        "(Phrase=[%s], SegNr=[%d]) -> Score=[%.3f]", phrase, segmentNr, keyphrase.getScore()));
            }

            phraseSegmentTable.put(phrase, segmentNr, keyphrase.getScore());
        }
        segmentNr++;
    }
    return phraseSegmentTable;
}

From source file:de.tudarmstadt.ukp.dkpro.wsd.evaluation.EvaluationTable.java

@Override
public void process(JCas aJCas) throws AnalysisEngineProcessException {
    super.process(aJCas);
    // Rows: WSD items (ordered by WSDItemComparator); columns: disambiguation
    // method names (ordered by StringComparator); cells: the assigned senses.
    Table<WSDItem, String, FSArray> testResults = TreeBasedTable.create(new WSDItemComparator(),
            new StringComparator());

    // Collect results, skipping those that cannot be keyed (no item) or that
    // carry no sense assignment; results matching the ignore pattern are
    // logged but not tabulated.
    for (WSDResult r : JCasUtil.select(aJCas, WSDResult.class)) {
        if (r.getWsdItem() == null) {
            logger.info("skipping " + r.getDisambiguationMethod() + " result for \"" + r.getCoveredText()
                    + "\" because it has no instance");
            continue;
        }
        if (r.getSenses() == null) {
            logger.info("skipping " + r.getDisambiguationMethod() + " result for " + r.getWsdItem().getId()
                    + " because no senses are assigned");
            continue;
        }
        if (ignoreResult(r)) {
            logger.info(goldStandardAlgorithm + " result for " + r.getWsdItem().getId()
                    + " matches the ignore pattern");
        } else {
            testResults.put(r.getWsdItem(), r.getDisambiguationMethod(), r.getSenses());
        }
    }

    try {
        // Emit the table: one header row (instance, gold standard, then every
        // other algorithm), followed by one row per WSD item.
        beginDocument("Document");
        beginTable(testResults.rowKeySet().size(), testResults.columnKeySet().size());
        beginTableRow();
        tableHeader("instance");
        tableHeader(goldStandardAlgorithm);
        for (String testAlgorithm : testResults.columnKeySet()) {
            if (!testAlgorithm.equals(goldStandardAlgorithm)) {
                tableHeader(testAlgorithm);
            }
        }
        endTableRow();
        for (WSDItem wsdItem : testResults.rowKeySet()) {
            // Honor the optional item cap (maxItemsAttempted < 0 means unlimited).
            if (maxItemsAttempted >= 0 && numItemsAttempted++ >= maxItemsAttempted) {
                break;
            }
            FSArray goldResults = testResults.get(wsdItem, goldStandardAlgorithm);
            beginTableRow();
            tableHeaderInstance(wsdItem);
            tableCellGoldResults(goldResults);
            // Each test algorithm's cell is rendered against the gold results.
            for (String testAlgorithm : testResults.columnKeySet()) {
                if (!testAlgorithm.equals(goldStandardAlgorithm)) {
                    tableCellTestResults(goldResults, testResults.get(wsdItem, testAlgorithm));
                }
            }
            endTableRow();
        }
        endTable();
        endDocument();
    } catch (IOException e) {
        throw new AnalysisEngineProcessException(e);
    }
}

From source file:eu.interedition.collatex.util.VariantGraphRanking.java

/**
 * Renders the ranking as a table: rows are ranks in natural order, columns are
 * witnesses ordered by sigil, and each cell holds the set of tokens of that
 * witness at that rank.
 *
 * @return a row-sorted table of rank x witness -> tokens
 */
public RowSortedTable<Integer, Witness, Set<Token>> asTable() {
    final TreeBasedTable<Integer, Witness, Set<Token>> ranks = TreeBasedTable.create(Ordering.natural(),
            Witness.SIGIL_COMPARATOR);
    for (Map.Entry<VariantGraph.Vertex, Integer> vertexRank : byVertex.entrySet()) {
        final int row = vertexRank.getValue();
        for (Token token : vertexRank.getKey().tokens(witnesses)) {
            final Witness column = token.getWitness();

            // Lazily create the cell's token set on first use.
            Set<Token> cell = ranks.get(row, column);
            if (cell == null) {
                cell = Sets.newHashSet();
                ranks.put(row, column, cell);
            }
            cell.add(token);
        }
    }
    return ranks;
}

From source file:org.apache.apex.malhar.lib.state.managed.BucketsFileSystem.java

/**
 * Saves data to a bucket. The data consists of key/values of all time-buckets of a particular bucket.
 *
 * @param windowId        window id
 * @param bucketId        bucket id
 * @param data            data of all time-buckets
 * @throws IOException if reading an existing time-bucket file or writing a new one fails
 */
protected void writeBucketData(long windowId, long bucketId, Map<Slice, Bucket.BucketedValue> data)
        throws IOException {
    // Regroup the incoming key/values by time bucket: rows are time-bucket ids
    // in natural long order, columns are keys ordered by the managed state's
    // key comparator.
    Table<Long, Slice, Bucket.BucketedValue> timeBucketedKeys = TreeBasedTable.create(Ordering.<Long>natural(),
            managedStateContext.getKeyComparator());

    for (Map.Entry<Slice, Bucket.BucketedValue> entry : data.entrySet()) {
        long timeBucketId = entry.getValue().getTimeBucket();
        timeBucketedKeys.put(timeBucketId, entry.getKey(), entry.getValue());
    }

    for (long timeBucket : timeBucketedKeys.rowKeySet()) {
        // Reuse existing time-bucket metadata when present, otherwise start fresh.
        BucketsFileSystem.MutableTimeBucketMeta tbm = getMutableTimeBucketMeta(bucketId, timeBucket);
        if (tbm == null) {
            tbm = new MutableTimeBucketMeta(bucketId, timeBucket);
        }

        addBucketName(bucketId);

        // Track total bytes written and the first key appended, for the metadata update below.
        long dataSize = 0;
        Slice firstKey = null;

        FileAccess.FileWriter fileWriter;
        String tmpFileName = getTmpFileName();
        if (tbm.getLastTransferredWindowId() == -1) {
            //A new time bucket so we append all the key/values to the new file
            fileWriter = getWriter(bucketId, tmpFileName);

            for (Map.Entry<Slice, Bucket.BucketedValue> entry : timeBucketedKeys.row(timeBucket).entrySet()) {
                Slice key = entry.getKey();
                Slice value = entry.getValue().getValue();

                dataSize += key.length;
                dataSize += value.length;

                fileWriter.append(key, value);
                if (firstKey == null) {
                    firstKey = key;
                }
            }
        } else {
            //the time bucket existed so we need to read the file and then re-write it
            TreeMap<Slice, Slice> fileData = new TreeMap<>(managedStateContext.getKeyComparator());
            FileAccess.FileReader fileReader = getReader(bucketId, getFileName(timeBucket));
            fileReader.readFully(fileData);
            fileReader.close();

            // Overlay the new values on the existing file contents before re-writing everything.
            for (Map.Entry<Slice, Bucket.BucketedValue> entry : timeBucketedKeys.row(timeBucket).entrySet()) {
                fileData.put(entry.getKey(), entry.getValue().getValue());
            }

            fileWriter = getWriter(bucketId, tmpFileName);
            for (Map.Entry<Slice, Slice> entry : fileData.entrySet()) {
                Slice key = entry.getKey();
                Slice value = entry.getValue();

                dataSize += key.length;
                dataSize += value.length;

                fileWriter.append(key, value);
                if (firstKey == null) {
                    firstKey = key;
                }
            }
        }
        fileWriter.close();
        // Swap the freshly written tmp file into place, then publish the new metadata.
        rename(bucketId, tmpFileName, getFileName(timeBucket));
        tbm.updateTimeBucketMeta(windowId, dataSize, firstKey);
        updateTimeBuckets(tbm);
    }

    updateBucketMetaFile(bucketId);
}

From source file:com.gradleware.tooling.toolingmodel.repository.internal.DefaultOmniBuildInvocationsContainerBuilder.java

/**
 * Collects the task selectors of the given project and, recursively, of all its
 * child projects into {@code taskSelectorsPerProject}, and returns an immutable
 * copy of the accumulated multimap keyed by project path.
 *
 * @param project the project whose (and whose descendants') task selectors are collected
 * @param taskSelectorsPerProject accumulator mapping project path to its task selectors
 * @param enforceAllTasksPublic if {@code true}, all tasks are treated as public
 * @return an immutable copy of the accumulated selectors grouped by project path
 */
@SuppressWarnings("StringEquality")
private static ImmutableMultimap<Path, OmniTaskSelector> buildTaskSelectorsRecursively(GradleProject project,
        Multimap<Path, OmniTaskSelector> taskSelectorsPerProject, boolean enforceAllTasksPublic) {
    // add task selectors of the current project
    TreeBasedTable<String, Path, String> aggregatedTasksWithDescription = TreeBasedTable
            .create(Ordering.usingToString(), Path.Comparator.INSTANCE);
    Set<String> publicTasks = Sets.newLinkedHashSet();
    collectAllTasksRecursively(project, aggregatedTasksWithDescription, publicTasks, enforceAllTasksPublic);
    for (String selectorName : aggregatedTasksWithDescription.rowKeySet()) {
        SortedMap<Path, String> pathsAndDescriptions = aggregatedTasksWithDescription.row(selectorName);
        String description = pathsAndDescriptions.get(pathsAndDescriptions.firstKey()); // description from project task with smallest path
        SortedSet<Path> fqnTaskNames = ImmutableSortedSet.orderedBy(Path.Comparator.INSTANCE)
                .addAll(pathsAndDescriptions.keySet()).build();

        // NULL_STRING appears to be a sentinel instance: the identity (!=)
        // comparison is deliberate — note the @SuppressWarnings("StringEquality").
        OmniTaskSelector taskSelector = DefaultOmniTaskSelector.from(selectorName,
                description != NULL_STRING ? description : null, Path.from(project.getPath()),
                publicTasks.contains(selectorName), fqnTaskNames);

        taskSelectorsPerProject.put(Path.from(project.getPath()), taskSelector);
    }

    // recurse into child projects and add their task selectors
    DomainObjectSet<? extends GradleProject> childProjects = project.getChildren();
    for (GradleProject childProject : childProjects) {
        buildTaskSelectorsRecursively(childProject, taskSelectorsPerProject, enforceAllTasksPublic);
    }

    // return the task selectors grouped by project path
    return ImmutableMultimap.copyOf(taskSelectorsPerProject);
}

From source file:com.google.gerrit.server.notedb.ChangeUpdate.java

/**
 * Creates an empty sorted table keyed by name (rows) and account id (columns),
 * holding optional short values per cell.
 *
 * @param nameComparator ordering for the name rows
 * @return a new {@link TreeBasedTable} whose account-id columns are ordered by their integer key
 */
private static Table<String, Account.Id, Optional<Short>> approvals(Comparator<String> nameComparator) {
    final Comparator<Account.Id> idComparator = comparing(IntKey::get);
    return TreeBasedTable.create(nameComparator, idComparator);
}

From source file:com.oodrive.nuage.dtx.DtxTestHelper.java

/**
 * Prepares, i.e. writes transactions to a set of journals according to the information given as a {@link Table}.
 *
 * @param dtxMgrTxTable
 *            (sorted) {@link TreeBasedTable} mapping resource manager {@link UUID}s, last transaction IDs to
 *            {@link DtxManager} instances
 * @param journalDirMap
 *            a {@link Map} providing the temporary directories for each {@link DtxManager}
 * @param setupRotMgr
 *            a central {@link JournalRotationManager} used to write the prepared journals
 * @return a {@link Table} of non-failing mock {@link DtxResourceManager}s with their last transaction ID and
 *         journal file directories
 * @throws IllegalStateException
 *             if writing to journals fails due to their internal state
 * @throws IOException
 *             if writing or reading data fails
 * @throws XAException
 *             if mock setup fails
 */
public static final Table<DtxResourceManager, Long, Path> prepareExistingJournals(
        final TreeBasedTable<Long, UUID, DtxManager> dtxMgrTxTable, final Map<DtxManager, Path> journalDirMap,
        final JournalRotationManager setupRotMgr) throws IllegalStateException, IOException, XAException {

    // reference map to order resource managers by their last tx ID
    final Map<DtxResourceManager, Long> rankMap = new HashMap<DtxResourceManager, Long>();

    // Orders resource managers by descending last tx ID, then by UUID, and
    // finally by hash code so unequal instances never compare as 0.
    final Comparator<DtxResourceManager> rowComp = new Comparator<DtxResourceManager>() {

        @Override
        public final int compare(final DtxResourceManager o1, final DtxResourceManager o2) {
            // compare last tx IDs before falling back to classic comparison
            final Long rank1 = rankMap.get(o1);
            final Long rank2 = rankMap.get(o2);
            final int rankComp = Long.compare(rank1.longValue(), rank2.longValue()) * -1;
            if (rankComp != 0) {
                return rankComp;
            }

            // fall back to comparing IDs
            final UUID id1 = o1.getId();
            final UUID id2 = o2.getId();
            final int idComp = id1.compareTo(id2);

            // maintain coherence with equals()
            if (idComp == 0) {
                return Integer.compare(o1.hashCode(), o2.hashCode());
            }
            return idComp;
        }
    };

    // Columns (last tx IDs) are kept in descending order.
    final Comparator<Long> columnComp = Collections.reverseOrder();

    final Table<DtxResourceManager, Long, Path> result = TreeBasedTable.create(rowComp, columnComp);

    // Tracks, per resource manager id, the DtxManager whose journal was written last,
    // so each new journal starts from a copy of its predecessor's records.
    final HashMap<UUID, DtxManager> previousLog = new HashMap<UUID, DtxManager>();

    long lastTxId = TX_ID.get();

    // Row keys are iterated in ascending target tx ID order; each row must
    // strictly increase the transaction ID.
    for (final Long currTargetTxId : dtxMgrTxTable.rowKeySet()) {

        final long targetTxId = currTargetTxId.longValue();
        if (targetTxId <= lastTxId) {
            throw new IllegalArgumentException(
                    "Not increasing transaction IDs; lastTxId=" + lastTxId + ", targetTxId=" + targetTxId);
        }

        final Set<TxNode> participants = newRandomParticipantsSet();

        final SortedMap<UUID, DtxManager> currRow = dtxMgrTxTable.row(currTargetTxId);

        // / insert here
        for (final UUID currResMgrId : currRow.keySet()) {

            final DtxResourceManager currResMgr = DtxDummyRmFactory
                    .newResMgrThatDoesEverythingRight(currResMgrId);

            final DtxManager currDtxMgr = currRow.get(currResMgrId);
            final Path currTmpDir = journalDirMap.get(currDtxMgr);

            final String journalFilename = newJournalFilePrefix(currDtxMgr.getNodeId(), currResMgrId);

            // Fresh target journal for this (manager, resource manager) pair.
            final WritableTxJournal targetJournal = new WritableTxJournal(currTmpDir.toFile(), journalFilename,
                    0, setupRotMgr);

            targetJournal.start();
            assertEquals(DEFAULT_LAST_TX_VALUE, targetJournal.getLastFinishedTxId());

            // Locate the previous journal for this resource manager id, if any.
            final WritableTxJournal prevJournal;
            final DtxManager prevDtxMgr = previousLog.get(currResMgrId);
            if (prevDtxMgr != null) {
                prevJournal = new WritableTxJournal(journalDirMap.get(prevDtxMgr).toFile(),
                        newJournalFilePrefix(prevDtxMgr.getNodeId(), currResMgrId), 0, setupRotMgr);
            } else {
                prevJournal = null;
            }

            // re-reads the previous journal and copies it to the new target
            if (prevJournal != null) {
                prevJournal.start();
                for (final JournalRecord currRecord : prevJournal.newReadOnlyTxJournal()) {
                    final TxJournalEntry currEntry = TxJournalEntry.parseFrom(currRecord.getEntry());
                    switch (currEntry.getOp()) {
                    case START:
                        targetJournal.writeStart(currEntry.getTx(), currEntry.getTxNodesList());
                        break;
                    case COMMIT:
                        targetJournal.writeCommit(currEntry.getTxId(), currEntry.getTxNodesList());
                        break;
                    case ROLLBACK:
                        targetJournal.writeRollback(currEntry.getTxId(), currEntry.getErrCode(),
                                currEntry.getTxNodesList());
                        break;
                    default:
                        // nothing
                    }
                }
                prevJournal.stop();
            }

            // Write just enough complete transactions to reach the target tx ID.
            final int nbToWrite = Long
                    .valueOf(lastTxId == DEFAULT_LAST_TX_VALUE ? targetTxId - 1 : targetTxId - lastTxId)
                    .intValue();
            lastTxId = DtxTestHelper.writeCompleteTransactions(targetJournal, nbToWrite, currResMgrId,
                    participants);
            assertEquals(targetTxId, lastTxId);
            assertEquals(lastTxId, targetJournal.getLastFinishedTxId());
            targetJournal.stop();

            previousLog.put(currResMgrId, currDtxMgr);

            // Record the rank before inserting, since rowComp reads rankMap.
            rankMap.put(currResMgr, currTargetTxId);
            result.put(currResMgr, currTargetTxId, currTmpDir);
        }

    }
    return result;
}

From source file:com.google.cloud.dataflow.sdk.options.PipelineOptionsFactory.java

/**
 * Outputs the set of options available to be set for the passed in {@link PipelineOptions}
 * interface. The output is in a human readable format. The format is:
 * <pre>
 * OptionGroup:
 *     ... option group description ...
 *
 *  --option1={@code <type>} or list of valid enum choices
 *     Default: value (if available, see {@link Default})
 *     ... option description ... (if available, see {@link Description})
 *     Required groups (if available, see {@link Required})
 *  --option2={@code <type>} or list of valid enum choices
 *     Default: value (if available, see {@link Default})
 *     ... option description ... (if available, see {@link Description})
 *     Required groups (if available, see {@link Required})
 * </pre>
 * This method will attempt to format its output to be compatible with a terminal window.
 */
public static void printHelp(PrintStream out, Class<? extends PipelineOptions> iface) {
    checkNotNull(out);
    checkNotNull(iface);
    validateWellFormed(iface, REGISTERED_OPTIONS);

    Set<PipelineOptionSpec> optionSpecs = PipelineOptionsReflector.getOptionSpecs(iface);

    // Group the property getters by defining interface (rows, ordered by class
    // name) and property name (columns, natural order).
    RowSortedTable<Class<?>, String, Method> gettersByIfaceAndName = TreeBasedTable
            .create(ClassNameComparator.INSTANCE, Ordering.natural());
    for (PipelineOptionSpec spec : optionSpecs) {
        gettersByIfaceAndName.put(spec.getDefiningInterface(), spec.getName(), spec.getGetterMethod());
    }

    for (Map.Entry<Class<?>, Map<String, Method>> ifaceRow : gettersByIfaceAndName.rowMap()
            .entrySet()) {
        Class<?> currentIface = ifaceRow.getKey();
        Map<String, Method> gettersByName = ifaceRow.getValue();

        SortedSetMultimap<String, String> requiredGroupNameToProperties = getRequiredGroupNamesToProperties(
                gettersByName);

        // Interface header and its description, then a blank separator line.
        out.format("%s:%n", currentIface.getName());
        prettyPrintDescription(out, currentIface.getAnnotation(Description.class));

        out.println();

        // Properties are listed case-insensitively sorted by name.
        List<String> sortedPropertyNames = Lists.newArrayList(gettersByName.keySet());
        Collections.sort(sortedPropertyNames, String.CASE_INSENSITIVE_ORDER);
        for (String propertyName : sortedPropertyNames) {
            Method getter = gettersByName.get(propertyName);
            Class<?> returnType = getter.getReturnType();
            // Enums render as the list of valid choices; other types as their simple name.
            String printableType = returnType.isEnum()
                    ? Joiner.on(" | ").join(returnType.getEnumConstants())
                    : returnType.getSimpleName();
            out.format("  --%s=<%s>%n", propertyName, printableType);
            Optional<String> defaultValue = getDefaultValueFromAnnotation(getter);
            if (defaultValue.isPresent()) {
                out.format("    Default: %s%n", defaultValue.get());
            }
            prettyPrintDescription(out, getter.getAnnotation(Description.class));
            prettyPrintRequiredGroups(out, getter.getAnnotation(Validation.Required.class),
                    requiredGroupNameToProperties);
        }
        out.println();
    }
}