Example usage for com.google.common.collect Table columnMap

List of usage examples for com.google.common.collect Table columnMap

Introduction

On this page you can find example usages of com.google.common.collect Table columnMap.

Prototype

Map<C, Map<R, V>> columnMap();

Document

Returns a view that associates each column key with the corresponding map from row keys to values.
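
For orientation, here is a minimal, self-contained sketch (not taken from the projects below) of what the columnMap() view returns and how it stays in sync with the backing table. The class name and data are made up for illustration.

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

import java.util.Map;

public class ColumnMapDemo {
    public static void main(String[] args) {
        // row key: person, column key: year, value: count
        Table<String, String, Integer> sales = HashBasedTable.create();
        sales.put("alice", "2023", 10);
        sales.put("alice", "2024", 12);
        sales.put("bob", "2024", 7);

        // Each column key maps to the (row key -> value) map of that column.
        Map<String, Map<String, Integer>> byYear = sales.columnMap();
        System.out.println(byYear.get("2024")); // e.g. {alice=12, bob=7}

        // columnMap() is a view: later table updates are visible through it.
        sales.put("carol", "2024", 3);
        System.out.println(byYear.get("2024")); // e.g. {alice=12, bob=7, carol=3}
    }
}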

Usage

From source file:de.tudarmstadt.ukp.dkpro.keyphrases.bookindexing.aggregation.aggregationstrategy.TopNAggregation_Position.java

@Override
public List<RankedPhrase> aggregatePhrases(Table<String, Integer, Double> phraseSegmentTable) {

    List<RankedPhrase> rankedPhrases = new ArrayList<RankedPhrase>();

    Map<Integer, Map<String, Double>> columnMap = phraseSegmentTable.columnMap();
    int maxRank = phraseSegmentTable.rowKeySet().size();
    int segments = phraseSegmentTable.columnKeySet().size(); // segments are the table's columns

    int segmentIndex = 1;
    for (Map<String, Double> column : columnMap.values()) {

        int baseRank = maxRank;
        for (Entry<String, Double> entry : sortByValue(column)) {

            double positionBonus = 1D - (segmentIndex / (double) segments);
            double newScore = baseRank-- + positionBonus;

            rankedPhrases.add(new RankedPhrase(entry.getKey(), newScore));

        }
        segmentIndex++;
    }
    return rankedPhrases;

}

From source file:de.tudarmstadt.ukp.dkpro.keyphrases.bookindexing.aggregation.aggregationstrategy.TopNAggregation_Score.java

@Override
public List<RankedPhrase> aggregatePhrases(Table<String, Integer, Double> phraseSegmentTable) {

    List<RankedPhrase> rankedPhrases = new ArrayList<RankedPhrase>();

    Map<Integer, Map<String, Double>> columnMap = phraseSegmentTable.columnMap();
    int maxRank = phraseSegmentTable.rowKeySet().size();
    double maxScore = maximum(phraseSegmentTable.values());

    for (Map<String, Double> column : columnMap.values()) {

        int baseRank = maxRank;
        for (Entry<String, Double> entry : sortByValue(column)) {

            double normalizedOldScore = entry.getValue() / maxScore;
            double newScore = baseRank-- + normalizedOldScore;

            rankedPhrases.add(new RankedPhrase(entry.getKey(), newScore));

        }
    }
    return rankedPhrases;

}
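
Both TopN strategies above rely on a sortByValue helper that is not shown in these snippets. Below is a minimal, hypothetical stand-in, assuming the helper orders a column's entries by descending score; the class name and data are illustrative only.

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;

public class SortByValueSketch {
    // Hypothetical stand-in for the sortByValue helper used above, assuming it
    // orders a column's entries by descending score.
    static List<Entry<String, Double>> sortByValue(Map<String, Double> column) {
        List<Entry<String, Double>> entries = new ArrayList<Entry<String, Double>>(column.entrySet());
        Collections.sort(entries, new Comparator<Entry<String, Double>>() {
            @Override
            public int compare(Entry<String, Double> a, Entry<String, Double> b) {
                return b.getValue().compareTo(a.getValue()); // highest score first
            }
        });
        return entries;
    }

    public static void main(String[] args) {
        Map<String, Double> column = new HashMap<String, Double>();
        column.put("alpha", 0.2);
        column.put("beta", 0.9);
        System.out.println(sortByValue(column)); // [beta=0.9, alpha=0.2]
    }
}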

From source file:uk.ac.open.kmi.iserve.discovery.disco.impl.OperationDataflowMatcher.java

/**
 * Performs a match between two URIs (from {@code origin} to {@code destination})
 * and returns the result.
 *
 * @param origin      URI of the element to match
 * @param destination URI of the element to match against
 * @return {@link uk.ac.open.kmi.iserve.discovery.api.MatchResult} with the result of the matching.
 */
@Override
public MatchResult match(URI origin, URI destination) {

    Set<URI> originOutputs = this.serviceManager.listOutputs(origin);
    Set<URI> destinationInputs = this.serviceManager.listInputs(destination);

    // Obtain all combinations of matches
    Table<URI, URI, MatchResult> matches = this.conceptMatcher.match(originOutputs, destinationInputs);
    // Collect the best match results for each of the destination's inputs
    // We should try and maximise the fulfillment of destination's inputs rather than be guided by the origin outputs
    ImmutableSet.Builder<MatchResult> builder = ImmutableSet.builder();

    for (Map.Entry<URI, Map<URI, MatchResult>> entry : matches.columnMap().entrySet()) {
        Map<URI, MatchResult> matchesMap = entry.getValue();
        Ordering<URI> valueComparator = Ordering.from(MatchResultComparators.BY_TYPE)
                .onResultOf(Functions.forMap(matchesMap)) // Order by value
                .compound(Ordering.natural()); // Order by URI eventually

        URI bestMatchUri = valueComparator.max(matchesMap.keySet());
        log.info("The best match for {} is {}, with a Match Result of {}", entry.getKey(), bestMatchUri,
                matchesMap.get(bestMatchUri).getMatchType());

        builder.add(matchesMap.get(bestMatchUri));
    }

    MatchResult result = INTERSECTION.apply(builder.build());
    log.info("Combined match result - {}", result);
    return result;
}
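
The Ordering chain above is the standard Guava idiom for picking the map key with the best value, tie-breaking on the key itself. A minimal sketch with hypothetical data:

import com.google.common.base.Functions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Ordering;

import java.util.Map;

public class BestKeyByValueDemo {
    public static void main(String[] args) {
        Map<String, Integer> scores = ImmutableMap.of("a", 2, "b", 5, "c", 5);

        Ordering<String> byValueThenKey = Ordering.<Integer>natural()
                .onResultOf(Functions.forMap(scores)) // compare keys by their mapped value
                .compound(Ordering.<String>natural()); // break ties on the key itself

        System.out.println(byValueThenKey.max(scores.keySet())); // c ("b" and "c" tie at 5)
    }
}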

From source file:org.caleydo.core.data.collection.table.NumericalTable.java

/**
 * Imputes missing table values using k-nearest-neighbour (KNN) imputation.
 */
private void performImputation(KNNImputeDescription desc) {

    Stopwatch w = new Stopwatch().start();
    ImmutableList.Builder<Gene> b = ImmutableList.builder();
    final int rows = getNrRows();
    final int cols = columns.size();

    // create data
    if (desc.getDimension().isRecord()) {
        for (int i = 0; i < rows; ++i) {
            float[] data = new float[cols];
            int nans = 0;
            int j = 0;
            for (AColumn<?, ?> column : columns) {
                @SuppressWarnings("unchecked")
                NumericalColumn<?, Float> nColumn = (NumericalColumn<?, Float>) column;
                Float raw = nColumn.getRaw(i);
                if (raw == null || raw.isNaN())
                    nans++;
                data[j++] = raw == null ? Float.NaN : raw.floatValue();
            }
            b.add(new Gene(i, nans, data));
        }
    } else {
        int i = 0;
        for (AColumn<?, ?> column : columns) {
            float[] data = new float[rows];
            int nans = 0;
            @SuppressWarnings("unchecked")
            NumericalColumn<?, Float> nColumn = (NumericalColumn<?, Float>) column;

            for (int j = 0; j < rows; j++) {
                Float raw = nColumn.getRaw(j); // read row j of this column, not row i
                if (raw == null || raw.isNaN())
                    nans++;
                data[j] = raw == null ? Float.NaN : raw.floatValue(); // j is advanced by the loop header
            }
            b.add(new Gene(i++, nans, data));
        }
    }

    System.out.println("NumericalTable.performImputation() data creation:\t" + w);
    w.reset().start();
    KNNImpute task = new KNNImpute(desc, b.build());
    ForkJoinPool pool = new ForkJoinPool();
    com.google.common.collect.Table<Integer, Integer, Float> impute = pool.invoke(task);
    pool.shutdown();
    System.out.println("NumericalTable.performImputation() computation:\t" + w);
    w.reset().start();

    // update data
    final boolean isColumnFirstDimension = desc.getDimension().isDimension();
    // in either case iterate over the columns first and update one column at a time
    for (Map.Entry<Integer, Map<Integer, Float>> entry : (isColumnFirstDimension ? impute.rowMap()
            : impute.columnMap()).entrySet()) {
        AColumn<?, ?> aColumn = columns.get(entry.getKey().intValue());
        @SuppressWarnings("unchecked")
        NumericalColumn<?, Float> nColumn = (NumericalColumn<?, Float>) aColumn;
        // apply updates
        for (Map.Entry<Integer, Float> entry2 : entry.getValue().entrySet()) {
            nColumn.setRaw(entry2.getKey(), entry2.getValue());
        }
    }
    System.out.println("NumericalTable.performImputation() update:\t" + w);
}
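
The ternary between impute.rowMap() and impute.columnMap() above works because the two views are symmetric: either one yields a Map from an outer key to an inner (key -> value) map, so the same update loop serves both data orientations. A minimal sketch with made-up indices:

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

import java.util.Map;

public class OrientationDemo {
    public static void main(String[] args) {
        Table<Integer, Integer, Float> impute = HashBasedTable.create();
        impute.put(0, 1, 0.5f);  // row 0, column 1
        impute.put(2, 1, 0.25f); // row 2, column 1

        // Depending on the data orientation, group by row or by column,
        // then feed either view to the same per-column update loop.
        boolean columnFirst = false;
        Map<Integer, Map<Integer, Float>> view = columnFirst ? impute.rowMap() : impute.columnMap();
        for (Map.Entry<Integer, Map<Integer, Float>> e : view.entrySet()) {
            System.out.println("key " + e.getKey() + " -> " + e.getValue()); // key 1 -> {0=0.5, 2=0.25}
        }
    }
}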

From source file:co.cask.cdap.app.mapreduce.LocalMRJobInfoFetcher.java

/**
 * @param runId for which information will be returned.
 * @return a {@link MRJobInfo} containing information about a particular MapReduce program run.
 */
public MRJobInfo getMRJobInfo(Id.Run runId) throws Exception {
    Preconditions.checkArgument(ProgramType.MAPREDUCE.equals(runId.getProgram().getType()));

    // baseTags has tag keys: ns.app.mr.runid
    Map<String, String> baseTags = Maps.newHashMap();
    baseTags.put(Constants.Metrics.Tag.NAMESPACE, runId.getNamespace().getId());
    baseTags.put(Constants.Metrics.Tag.APP, runId.getProgram().getApplicationId());
    baseTags.put(Constants.Metrics.Tag.MAPREDUCE, runId.getProgram().getId());
    baseTags.put(Constants.Metrics.Tag.RUN_ID, runId.getId());

    Map<String, String> mapTags = Maps.newHashMap(baseTags);
    mapTags.put(Constants.Metrics.Tag.MR_TASK_TYPE, MapReduceMetrics.TaskType.Mapper.getId());

    Map<String, String> reduceTags = Maps.newHashMap(baseTags);
    reduceTags.put(Constants.Metrics.Tag.MR_TASK_TYPE, MapReduceMetrics.TaskType.Reducer.getId());

    // Table from task instance id -> (counter name -> counter value)
    Table<String, String, Long> mapTaskMetrics = HashBasedTable.create();
    Table<String, String, Long> reduceTaskMetrics = HashBasedTable.create();

    // Populate mapTaskMetrics and reduceTaskMetrics via MetricStore. Used to construct MRTaskInfo below.
    Map<String, String> metricNamesToCounters = Maps.newHashMap();
    metricNamesToCounters.put(prependSystem(MapReduceMetrics.METRIC_TASK_INPUT_RECORDS),
            TaskCounter.MAP_INPUT_RECORDS.name());
    metricNamesToCounters.put(prependSystem(MapReduceMetrics.METRIC_TASK_OUTPUT_RECORDS),
            TaskCounter.MAP_OUTPUT_RECORDS.name());
    metricNamesToCounters.put(prependSystem(MapReduceMetrics.METRIC_TASK_BYTES),
            TaskCounter.MAP_OUTPUT_BYTES.name());
    metricNamesToCounters.put(prependSystem(MapReduceMetrics.METRIC_TASK_COMPLETION),
            MapReduceMetrics.METRIC_TASK_COMPLETION);

    // get metrics grouped by instance-id for the map tasks
    queryGroupedAggregates(mapTags, mapTaskMetrics, metricNamesToCounters);

    Map<String, Long> mapProgress = Maps.newHashMap();
    if (mapTaskMetrics.columnMap().containsKey(MapReduceMetrics.METRIC_TASK_COMPLETION)) {
        mapProgress = Maps
                .newHashMap(mapTaskMetrics.columnMap().remove(MapReduceMetrics.METRIC_TASK_COMPLETION));
    }

    Map<String, String> reduceMetricsToCounters = Maps.newHashMap();
    reduceMetricsToCounters.put(prependSystem(MapReduceMetrics.METRIC_TASK_INPUT_RECORDS),
            TaskCounter.REDUCE_INPUT_RECORDS.name());
    reduceMetricsToCounters.put(prependSystem(MapReduceMetrics.METRIC_TASK_OUTPUT_RECORDS),
            TaskCounter.REDUCE_OUTPUT_RECORDS.name());
    reduceMetricsToCounters.put(prependSystem(MapReduceMetrics.METRIC_TASK_COMPLETION),
            MapReduceMetrics.METRIC_TASK_COMPLETION);

    // get metrics grouped by instance-id for the reduce tasks
    queryGroupedAggregates(reduceTags, reduceTaskMetrics, reduceMetricsToCounters);

    Map<String, Long> reduceProgress = Maps.newHashMap();
    if (reduceTaskMetrics.columnMap().containsKey(MapReduceMetrics.METRIC_TASK_COMPLETION)) {
        reduceProgress = Maps
                .newHashMap(reduceTaskMetrics.columnMap().remove(MapReduceMetrics.METRIC_TASK_COMPLETION));
    }

    // Construct MRTaskInfos from the information we can get from Metric system.
    List<MRTaskInfo> mapTaskInfos = Lists.newArrayList();
    for (Map.Entry<String, Map<String, Long>> taskEntry : mapTaskMetrics.rowMap().entrySet()) {
        String mapTaskId = taskEntry.getKey();
        mapTaskInfos.add(new MRTaskInfo(mapTaskId, null, null, null, mapProgress.get(mapTaskId) / 100.0F,
                taskEntry.getValue()));
    }

    List<MRTaskInfo> reduceTaskInfos = Lists.newArrayList();
    for (Map.Entry<String, Map<String, Long>> taskEntry : reduceTaskMetrics.rowMap().entrySet()) {
        String reduceTaskId = taskEntry.getKey();
        reduceTaskInfos.add(new MRTaskInfo(reduceTaskId, null, null, null,
                reduceProgress.get(reduceTaskId) / 100.0F, taskEntry.getValue()));
    }

    return getJobCounters(mapTags, reduceTags, mapTaskInfos, reduceTaskInfos);
}
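
The containsKey/remove calls above exploit that columnMap() is a modifiable view: removing a column through it returns the removed (row key -> value) map and deletes those cells from the backing table. A minimal sketch with hypothetical counter names:

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Maps;
import com.google.common.collect.Table;

import java.util.Map;

public class ExtractColumnDemo {
    public static void main(String[] args) {
        // row key: task id, column key: counter name, value: counter value
        Table<String, String, Long> metrics = HashBasedTable.create();
        metrics.put("task1", "completion", 50L);
        metrics.put("task1", "records", 100L);
        metrics.put("task2", "completion", 75L);

        Map<String, Long> progress = Maps.newHashMap();
        if (metrics.columnMap().containsKey("completion")) {
            // remove() returns the removed column; the copy decouples it from the table.
            progress = Maps.newHashMap(metrics.columnMap().remove("completion"));
        }

        System.out.println(progress);         // {task1=50, task2=75}
        System.out.println(metrics.rowMap()); // only {task1={records=100}} remains
    }
}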

From source file:uk.ac.open.kmi.iserve.discovery.disco.impl.GenericLogicDiscoverer.java

/**
 * Generic implementation for finding all the Services or Operations that have SOME of the given types as inputs or outputs.
 *
 * @param entityType   the MSM URI of the type of entity we are looking for. Only supports Service and Operation.
 * @param relationship the MSM URI of the relationship we are looking for. Only supports hasInput and hasOutput.
 * @param types        the input/output types (modelReferences that is) we are looking for
 * @return a Map mapping operation/services URIs to MatchResults.
 */
private Map<URI, MatchResult> findSome(URI entityType, URI relationship, Set<URI> types) {

    // Ensure that we have been given correct parameters
    if (types == null || types.isEmpty()
            || (!entityType.toASCIIString().equals(MSM.Service.getURI())
                    && !entityType.toASCIIString().equals(MSM.Operation.getURI()))
            || (!relationship.toASCIIString().equals(MSM.hasInput.getURI())
                    && !relationship.toASCIIString().equals(MSM.hasOutput.getURI())
                    && !relationship.toASCIIString().equals(SAWSDL.modelReference.getURI()))) {

        return ImmutableMap.of();
    }

    // Expand the input types to get all that match enough to be consumed
    // TODO: The leastOfType should be configurable
    Table<URI, URI, MatchResult> expandedTypes;
    if (relationship.toASCIIString().equals(SAWSDL.modelReference.getURI())) {
        expandedTypes = HashBasedTable.create();
        //TODO: fix this properly
        for (URI type : types) {
            expandedTypes.putAll(this.conceptMatcher.listMatchesAtMostOfType(ImmutableSet.of(type),
                    LogicConceptMatchType.Subsume));
            expandedTypes.putAll(
                    this.conceptMatcher.listMatchesOfType(ImmutableSet.of(type), LogicConceptMatchType.Exact));
        }

    } else {
        expandedTypes = this.conceptMatcher.listMatchesAtLeastOfType(types, LogicConceptMatchType.Plugin);
    }

    // Track all the results in a multimap to push the details up the stack
    Multimap<URI, MatchResult> result = ArrayListMultimap.create();

    // Find all the entities with modelReferences to the expanded types
    // The column view is the one with all the possible matches since a class will always match itself
    Map<URI, Map<URI, MatchResult>> columnMap = expandedTypes.columnMap();
    for (URI type : columnMap.keySet()) {
        Set<URI> entities = ImmutableSet.of();
        if (relationship.toASCIIString().equals(SAWSDL.modelReference.getURI())) {
            entities = listEntitesWithModelReference(entityType, type);
        } else if (relationship.toASCIIString().equals(MSM.hasInput.getURI())
                || relationship.toASCIIString().equals(MSM.hasOutput.getURI())) {
            entities = listEntitiesWithType(entityType, relationship, type);
        }

        for (URI entity : entities) {
            result.putAll(entity, columnMap.get(type).values());
        }
    }

    // Merge the results into a single map using Union
    return Maps.transformValues(result.asMap(), MatchResultsMerger.UNION);

}
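
The final Maps.transformValues call above merges each entity's collected match results in one pass. Below is a minimal sketch of the same collect-then-merge pattern, with integer scores and Collections.max as a stand-in for MatchResultsMerger.UNION; names and data are illustrative only.

import com.google.common.base.Function;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;

import java.util.Collection;
import java.util.Collections;
import java.util.Map;

public class CollectThenMergeDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> resultsByEntity = ArrayListMultimap.create();
        resultsByEntity.put("svc1", 1);
        resultsByEntity.put("svc1", 3);
        resultsByEntity.put("svc2", 5);

        // asMap() exposes the multimap as Map<K, Collection<V>>; transformValues
        // lazily merges each collection into a single value.
        Map<String, Integer> merged = Maps.transformValues(resultsByEntity.asMap(),
                new Function<Collection<Integer>, Integer>() {
                    @Override
                    public Integer apply(Collection<Integer> values) {
                        return Collections.max(values);
                    }
                });

        System.out.println(merged); // {svc1=3, svc2=5}
    }
}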

From source file:co.cask.cdap.internal.app.runtime.distributed.DistributedProgramRuntimeService.java

@Override
public synchronized Map<RunId, RuntimeInfo> list(ProgramType type) {
    Map<RunId, RuntimeInfo> result = Maps.newHashMap();
    result.putAll(super.list(type));

    // Table holds the Twill RunId and TwillController associated with the program matching the input type
    Table<Id.Program, RunId, TwillController> twillProgramInfo = HashBasedTable.create();

    // Goes through all live application and fill the twillProgramInfo table
    for (TwillRunner.LiveInfo liveInfo : twillRunner.lookupLive()) {
        String appName = liveInfo.getApplicationName();
        Matcher matcher = APP_NAME_PATTERN.matcher(appName);
        if (!matcher.matches()) {
            continue;
        }
        if (!type.equals(getType(matcher.group(1)))) {
            continue;
        }

        for (TwillController controller : liveInfo.getControllers()) {
            RunId twillRunId = controller.getRunId();
            if (isTwillRunIdCached(twillRunId)) {
                continue;
            }

            Id.Program programId = Id.Program.from(matcher.group(2), matcher.group(3), type, matcher.group(4));
            twillProgramInfo.put(programId, twillRunId, controller);
        }
    }

    if (twillProgramInfo.isEmpty()) {
        return ImmutableMap.copyOf(result);
    }

    final Set<RunId> twillRunIds = twillProgramInfo.columnKeySet();
    List<RunRecordMeta> activeRunRecords = store.getRuns(ProgramRunStatus.RUNNING,
            new Predicate<RunRecordMeta>() {
                @Override
                public boolean apply(RunRecordMeta record) {
                    return record.getTwillRunId() != null && twillRunIds
                            .contains(org.apache.twill.internal.RunIds.fromString(record.getTwillRunId()));
                }
            });

    for (RunRecordMeta record : activeRunRecords) {
        RunId twillRunIdFromRecord = org.apache.twill.internal.RunIds.fromString(record.getTwillRunId());
        // Get the CDAP RunId from RunRecord
        RunId runId = RunIds.fromString(record.getPid());
        // Get the Program and TwillController for the current twillRunId
        Map<Id.Program, TwillController> mapForTwillId = twillProgramInfo.columnMap().get(twillRunIdFromRecord);
        Map.Entry<Id.Program, TwillController> entry = mapForTwillId.entrySet().iterator().next();

        // Create RuntimeInfo for the current Twill RunId
        RuntimeInfo runtimeInfo = createRuntimeInfo(entry.getKey(), entry.getValue(), runId);
        if (runtimeInfo != null) {
            result.put(runId, runtimeInfo);
            updateRuntimeInfo(type, runId, runtimeInfo);
        } else {
            LOG.warn("Unable to find program {} {}", type, entry.getKey());
        }
    }

    return ImmutableMap.copyOf(result);
}