Example usage for com.google.common.collect TreeMultimap get

List of usage examples for com.google.common.collect TreeMultimap get

Introduction

On this page you can find example usages of com.google.common.collect TreeMultimap.get.

Prototype

@Override
@GwtIncompatible("NavigableSet")
public NavigableSet<V> get(@Nullable K key) 

Source Link

Usage

From source file:sadl.modellearner.rtiplus.SearchingPDRTALearner.java

/**
 * Greedy variant of the RTI+ learning loop: repeatedly selects the most
 * visited transition and applies the single best-scoring refinement (split or
 * merge) for it, or colors the transition's target state red when no
 * refinement is applicable.
 *
 * @param a  the automaton that is refined in place
 * @param sc the state coloring, updated as states are colored red
 */
private void greedyRTIplus(PDRTA a, StateColoring sc) {

    // The pre-exit shortcut is only active when OR is selected and the IDA
    // distribution check is disabled.
    final boolean preExit = (bOp[2] instanceof OrOperator)
            && distrCheckType.equals(DistributionCheckType.DISABLED);
    if (preExit) {
        logger.info("Pre-Exiting algorithm when number of tails falls below minData");
    }

    int counter = 0;
    Transition t;
    // NOTE(review): with preExit the loop stops once the tail count is
    // >= minData, which seems to contradict the log message above ("falls
    // below minData") -- confirm the intended comparison direction.
    while ((t = getMostVisitedTrans(a, sc)) != null
            && !(preExit && t.in.getTails().size() >= PDRTA.getMinData())) {
        if (directory != null) {
            draw(a, true, directory, counter);
        }
        logger.debug("Automaton contains {} states and {} transitions", a.getStateCount(), a.getSize());
        logger.debug("Found most visited transition  {}  containing {} tails", t.toString(),
                t.in.getTails().size());
        counter++;

        if (!distrCheckType.equals(DistributionCheckType.DISABLED)) {
            logger.debug("Checking data distribution");
            final List<Interval> idaIns = checkDistribution(t.source, t.symAlphIdx, distrCheckType, sc);
            if (idaIns.size() > 0) {
                logger.debug("#{} DO: Split interval due to IDA into {} intervals", counter, idaIns.size());
                // TODO Printing the intervals may be too expensive just for logging
                final StringBuilder sb = new StringBuilder();
                for (final Interval in : idaIns) {
                    sb.append("  ");
                    sb.append(in.toString());
                }
                logger.trace("Resulting intervals are:{}", sb.toString());
                continue;
            } else {
                logger.debug("No splits because of data distribution were performed in:  {}", t.in.toString());
                if (bOp[2] instanceof OrOperator && t.in.getTails().size() < PDRTA.getMinData()) {
                    // Shortcut for skipping merges and splits when OR is selected
                    if (mainModel == a) {
                        logger.debug("#{} DO: Color state {} red", counter, t.target.getIndex());
                    }
                    sc.setRed(t.target);
                    continue;
                }
            }
        }

        logger.debug("Testing splits");
        final NavigableSet<Refinement> splits = getSplitRefs(t, sc).descendingSet();
        logger.debug("Found {} possible splits", splits.size());
        logger.debug("Testing merges");
        final NavigableSet<Refinement> merges = getMergeRefs(t, sc).descendingSet();
        logger.debug("Found {} possible merges", merges.size());
        logger.debug("Calculating sizes for splits");

        // Candidate refinements keyed by model score; lower keys are better
        // (TreeMultimap keeps keys in natural ascending order).
        final TreeMultimap<Double, Refinement> all = TreeMultimap.create();
        int c = 0;
        for (final Refinement r : splits) {
            if (c >= maxSplitsToSearch) {
                break;
            }
            // Score each candidate on a copy so the working automaton stays intact.
            final PDRTA copy = new PDRTA(a);
            final StateColoring cColoring = new StateColoring(sc, copy);
            final Refinement cR = new Refinement(copy, r, cColoring);
            cR.refine();
            complete(copy, cColoring);
            // TODO Create algo param for selecting between AIC and size
            // final double modelScore = copy.getSize();
            final double modelScore = calcAIC(copy);
            all.put(modelScore, r);
            c++;
        }

        logger.debug("Calculating sizes for merges");
        c = 0;
        for (final Refinement r : merges) {
            if (c >= maxMergesToSearch) {
                break;
            }
            final PDRTA copy = new PDRTA(a);
            final StateColoring cColoring = new StateColoring(sc, copy);
            final Refinement cR = new Refinement(copy, r, cColoring);
            cR.refine();
            complete(copy, cColoring);
            // TODO Create algo param for selecting between AIC and size
            // final double modelScore = copy.getSize();
            final double modelScore = calcAIC(copy);
            all.put(modelScore, r);
            c++;
        }

        assert (all.size() <= (maxMergesToSearch + maxSplitsToSearch));
        if (!all.isEmpty()) {
            // Apply the best candidate: lowest score key, last refinement in
            // that key's (sorted) value set.
            final double minSize = all.keySet().first();
            final Refinement r = all.get(minSize).last();
            logger.debug("#{} DO: {}  quality={}", counter, r.toString(), minSize);
            r.refine();
        } else {
            // No applicable refinement: promote the target state to red.
            logger.debug("#{} DO: Color state {} red", counter, t.target.getIndex());
            sc.setRed(t.target);
        }
        if (Settings.isDebug()) {
            a.checkConsistency();
        }
    }

    a.checkConsistency();
    assert (a.getStateCount() == sc.getNumRedStates());
    if (directory != null) {
        draw(a, true, directory, counter);
    }
}

From source file:org.apromore.toolbox.clustering.dissimilarity.algorithm.SimpleGEDDeterministicGreedy.java

/**
 * Greedily computes an edit distance between the two simple graphs by
 * repeatedly adding the vertex pair that lowers the mapping's edit distance
 * the most, preferring deterministic tie-breaking (label order, then label
 * context) before falling back to a seeded random choice.
 *
 * @param sg1 the first graph
 * @param sg2 the second graph
 * @return the shortest edit distance reached by the greedy search
 */
public double compute(SimpleGraph sg1, SimpleGraph sg2) {
    init(sg1, sg2);

    TwoVertices couple;
    Vector<TwoVertices> bestCandidates;
    Set<TwoVertices> newMapping;
    Set<TwoVertices> newOpenCouples;
    Set<TwoVertices> mapping = new HashSet<>();
    Set<TwoVertices> openCouples = findCouples(sg1.getVertices(), sg2.getVertices());

    String tmp, label1, label2, contextkey, firstkey;
    double newEditDistance;
    double newShortestEditDistance;
    double shortestEditDistance = Double.MAX_VALUE;
    // Fixed seed keeps the random tie-break fallback reproducible across runs.
    Random randomized = new Random(123456789);

    TreeMultiset<String> mset;
    TreeMultimap<String, TwoVertices> tmap;
    TreeMultimap<String, TwoVertices> tmapp;

    boolean doStep = true;
    while (doStep) {
        doStep = false;
        bestCandidates = new Vector<>();
        newShortestEditDistance = shortestEditDistance;

        // Collect every open couple whose addition yields the (jointly)
        // smallest edit distance seen in this step.
        for (TwoVertices oCouple : openCouples) {
            newMapping = new HashSet<>(mapping);
            newMapping.add(oCouple);
            newEditDistance = this.editDistance(newMapping);
            LOGGER.debug("Couple Distance: " + newEditDistance + " - " + oCouple.v1 + " * " + oCouple.v2);

            if (newEditDistance < newShortestEditDistance) {
                bestCandidates = new Vector<>();
                bestCandidates.add(oCouple);
                newShortestEditDistance = newEditDistance;
            } else if (newEditDistance == newShortestEditDistance) {
                bestCandidates.add(oCouple);
            }
        }

        if (bestCandidates.size() > 0) {
            if (bestCandidates.size() == 1)
                couple = bestCandidates.firstElement();
            else {
                // Tie-break 1: key each pair by its two labels concatenated in
                // sorted order; TreeMultimap keeps keys sorted, so firstkey is
                // the lexicographically smallest.
                tmap = TreeMultimap.create();
                for (TwoVertices pair : bestCandidates) {
                    label1 = sg1.getLabel(pair.v1);
                    label2 = sg2.getLabel(pair.v2);
                    if (label1.compareTo(label2) > 0) {
                        tmp = label1;
                        label1 = label2;
                        label2 = tmp;
                    }
                    tmap.put(label1 + label2, pair);
                }
                firstkey = tmap.keySet().first();
                LOGGER.debug("firstkey: " + firstkey);

                if (tmap.get(firstkey).size() == 1) {
                    couple = tmap.get(firstkey).first();
                } else if (tmap.get(firstkey).size() > 1) {
                    // Tie-break 2: extend each label with the sorted labels of
                    // the vertex's predecessors and successors (its context).
                    tmapp = TreeMultimap.create();
                    mset = TreeMultiset.create();
                    for (TwoVertices pair : tmap.get(firstkey)) {
                        label1 = sg1.getLabel(pair.v1);
                        mset.clear();
                        for (Integer n : sg1.preSet(pair.v1)) {
                            mset.add(sg1.getLabel(n));
                        }
                        label1 += mset.toString();
                        mset.clear();
                        for (Integer n : sg1.postSet(pair.v1)) {
                            mset.add(sg1.getLabel(n));
                        }
                        label1 += mset.toString();

                        label2 = sg2.getLabel(pair.v2);
                        mset.clear();
                        for (Integer n : sg2.preSet(pair.v2)) {
                            mset.add(sg2.getLabel(n));
                        }
                        label2 += mset.toString();
                        mset.clear();
                        for (Integer n : sg2.postSet(pair.v2)) {
                            mset.add(sg2.getLabel(n));
                        }
                        label2 += mset.toString();

                        if (label1.compareTo(label2) > 0) {
                            tmp = label1;
                            label1 = label2;
                            label2 = tmp;
                        }
                        tmapp.put(label1 + label2, pair);
                    }
                    contextkey = tmapp.keySet().first();

                    if (tmapp.get(contextkey).size() == 1) {
                        couple = tmapp.get(contextkey).first();
                    } else {
                        // Tie-break 3: still ambiguous -- pick a random
                        // candidate and record the loss of determinism.
                        deterministic = false;
                        couple = bestCandidates.get(randomized.nextInt(bestCandidates.size()));
                    }
                } else {
                    // Defensive branch: firstkey comes from tmap, so its value
                    // set should never be empty; fall back to a random pick.
                    deterministic = false;
                    couple = bestCandidates.get(randomized.nextInt(bestCandidates.size()));
                }
            }

            // Drop every open couple that shares a vertex with the chosen pair.
            newOpenCouples = new HashSet<>();
            for (TwoVertices p : openCouples) {
                if (!p.v1.equals(couple.v1) && !p.v2.equals(couple.v2)) {
                    newOpenCouples.add(p);
                }
            }
            openCouples = newOpenCouples;
            LOGGER.debug("openCouples: " + openCouples.size());

            mapping.add(couple);
            shortestEditDistance = newShortestEditDistance;
            doStep = true;
        }
    }
    LOGGER.debug("Mappings: " + mapping.size());

    return shortestEditDistance;
}

From source file:org.gradle.api.internal.resolve.VariantsMatcher.java

/**
 * Filters the candidate binaries down to the ones whose variant axis values
 * best match the requested variants metadata.
 *
 * @param variantsMetaData the requested variant values to resolve against
 * @param binaries         the candidate binaries
 * @return the surviving binaries; the input collection itself when empty
 */
public Collection<? extends BinarySpec> filterBinaries(VariantsMetaData variantsMetaData,
        Collection<BinarySpec> binaries) {
    if (binaries.isEmpty()) {
        return binaries;
    }
    Set<String> resolveDimensions = variantsMetaData.getNonNullVariantAxes();
    // Best value(s) seen so far per axis; axis names compared case-insensitively.
    TreeMultimap<String, VariantValue> selectedVariants = TreeMultimap.create(String.CASE_INSENSITIVE_ORDER,
            SPEC_COMPARATOR);
    Set<BinarySpec> removedSpecs = Sets.newHashSet();
    for (BinarySpec binarySpec : binaries) {
        if (binarySpecType.isAssignableFrom(binarySpec.getClass())) {
            VariantsMetaData binaryVariants = DefaultVariantsMetaData.extractFrom(binarySpec, schemaStore);
            // Only axes present on both the request and the binary are compared.
            Set<String> commonsDimensions = Sets.intersection(resolveDimensions,
                    binaryVariants.getNonNullVariantAxes());
            Set<String> incompatibleDimensionTypes = VariantsMetaDataHelper
                    .determineAxesWithIncompatibleTypes(variantsMetaData, binaryVariants, commonsDimensions);
            if (incompatibleDimensionTypes.isEmpty()) {
                for (String dimension : commonsDimensions) {
                    Class<?> dimensionType = variantsMetaData.getVariantAxisType(dimension).getConcreteClass();
                    boolean isStringType = String.class == dimensionType;
                    // Axis values are either plain strings or Named instances.
                    Object requestedValue = isStringType ? variantsMetaData.getValueAsString(dimension)
                            : variantsMetaData.getValueAsType(
                                    Cast.<Class<? extends Named>>uncheckedCast(dimensionType), dimension);
                    Object binaryValue = isStringType ? binaryVariants.getValueAsString(dimension)
                            : binaryVariants.getValueAsType(
                                    Cast.<Class<? extends Named>>uncheckedCast(dimensionType), dimension);
                    VariantAxisCompatibility<Object> selector = createSelector(requestedValue);
                    if (selector.isCompatibleWithRequirement(requestedValue, binaryValue)) {
                        VariantValue value = new VariantValue(binaryValue, binarySpec);
                        SortedSet<VariantValue> variantValues = selectedVariants.get(dimension);
                        for (VariantValue variantValue : variantValues) {
                            // all the values are equal, but we store all the binaries that match that value
                            // and incrementally build a list of binaries which are excluded because of a better match
                            if (selector.betterFit(requestedValue, variantValue.value, binaryValue)) {
                                // the new value is a better fit than the old one
                                removedSpecs.add(variantValue.spec);
                            } else if (selector.betterFit(requestedValue, binaryValue, variantValue.value)) {
                                // the old value is a better fit than the new one, let's ignore the new one altogether
                                removedSpecs.add(value.spec);
                            }
                        }
                        selectedVariants.put(dimension, value);
                    } else {
                        removedSpecs.add(binarySpec);
                    }
                }
            }
        }
    }

    // Union of the binaries selected on any axis, minus the ones that were
    // out-competed by a better match or incompatible on some axis.
    Set<BinarySpec> union = null;
    for (String dimension : selectedVariants.keySet()) {
        Set<BinarySpec> variantValues = ImmutableSet
                .copyOf(Iterables.transform(selectedVariants.get(dimension), VariantValue.SPEC_FUNCTION));
        union = union == null ? variantValues : Sets.union(union, variantValues);
    }
    return union == null ? Collections.<BinarySpec>emptySet() : Sets.difference(union, removedSpecs);
}

From source file:com.palantir.atlasdb.keyvalue.cassandra.CassandraKeyValueService.java

/**
 * Builds one {@link Callable} per (column, row-batch) pair that runs a
 * multiget_slice against a single Cassandra host and feeds the results to the
 * given visitor.
 *
 * @param host        the Cassandra host to query
 * @param tableName   the logical table name (translated via internalTableName)
 * @param cells       the cells to load, subsequently grouped by column name
 * @param startTs     upper bound timestamp for the slice
 * @param loadAllTs   when true, fetch all timestamps per cell instead of one
 * @param v           thread-safe visitor that receives each batch's results
 * @param consistency consistency level used for the reads
 * @return the list of tasks to be executed by the caller
 */
private List<Callable<Void>> getLoadWithTsTasksForSingleHost(final InetAddress host, final String tableName,
        Collection<Cell> cells, final long startTs, final boolean loadAllTs, final ThreadSafeResultVisitor v,
        final ConsistencyLevel consistency) throws Exception {
    final ColumnParent colFam = new ColumnParent(internalTableName(tableName));
    // Group the cells by column name; byte[] keys need an explicit comparator.
    TreeMultimap<byte[], Cell> cellsByCol = TreeMultimap.create(UnsignedBytes.lexicographicalComparator(),
            Ordering.natural());
    for (Cell cell : cells) {
        cellsByCol.put(cell.getColumnName(), cell);
    }
    List<Callable<Void>> tasks = Lists.newArrayList();
    int fetchBatchCount = configManager.getConfig().fetchBatchCount();
    for (final byte[] col : cellsByCol.keySet()) {
        // Oversized groups are split below; warn so callers can fix their batching.
        if (cellsByCol.get(col).size() > fetchBatchCount) {
            log.warn(
                    "Re-batching in getLoadWithTsTasksForSingleHost a call to {} for table {} that attempted to "
                            + "multiget {} rows; this may indicate overly-large batching on a higher level.\n{}",
                    host, tableName, cellsByCol.get(col).size(),
                    CassandraKeyValueServices.getFilteredStackTrace("com.palantir"));
        }
        for (final List<Cell> partition : Lists.partition(ImmutableList.copyOf(cellsByCol.get(col)),
                fetchBatchCount)) {
            tasks.add(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    return clientPool.runWithPooledResourceOnHost(host,
                            new FunctionCheckedException<Client, Void, Exception>() {
                                @Override
                                public Void apply(Client client) throws Exception {
                                    // Slice bounds built from the column and timestamps
                                    // (startTs - 1 down to -1); assumes makeCompositeBuffer
                                    // encodes an exclusive/inclusive range -- confirm there.
                                    ByteBuffer start = CassandraKeyValueServices.makeCompositeBuffer(col,
                                            startTs - 1);
                                    ByteBuffer end = CassandraKeyValueServices.makeCompositeBuffer(col, -1);
                                    SliceRange slice = new SliceRange(start, end, false,
                                            loadAllTs ? Integer.MAX_VALUE : 1);
                                    SlicePredicate pred = new SlicePredicate();
                                    pred.setSlice_range(slice);

                                    List<ByteBuffer> rowNames = Lists
                                            .newArrayListWithCapacity(partition.size());
                                    for (Cell c : partition) {
                                        rowNames.add(ByteBuffer.wrap(c.getRowName()));
                                    }
                                    Map<ByteBuffer, List<ColumnOrSuperColumn>> results = multigetInternal(
                                            client, tableName, rowNames, colFam, pred, consistency);
                                    v.visit(results);
                                    return null;
                                }

                                @Override
                                public String toString() {
                                    // Descriptive name for logging/debugging of pooled tasks.
                                    return "multiget_slice(" + host + ", " + colFam + ", " + partition.size()
                                            + " rows" + ")";
                                }
                            });
                }
            });
        }
    }
    return tasks;
}

From source file:org.gradle.jvm.internal.resolve.VariantsMatcher.java

/**
 * Filters the candidate binaries down to those whose variant axis values best
 * satisfy the requested variants metadata, discarding binaries that lose to a
 * better-fitting value on some axis.
 *
 * @param variantsMetaData the requested variant values to resolve against
 * @param binaries         the candidate binaries
 * @return the surviving binaries; the input collection itself when empty
 */
public Collection<? extends BinarySpec> filterBinaries(VariantsMetaData variantsMetaData,
        Collection<BinarySpec> binaries) {
    if (binaries.isEmpty()) {
        return binaries;
    }
    Set<String> resolveDimensions = variantsMetaData.getNonNullVariantAxes();
    // Per-axis record of the best value(s) found so far (case-insensitive axis names).
    TreeMultimap<String, VariantValue> selectedVariants = TreeMultimap.create(String.CASE_INSENSITIVE_ORDER,
            SPEC_COMPARATOR);
    Set<BinarySpec> removedSpecs = Sets.newHashSet();
    for (BinarySpec binarySpec : binaries) {
        if (binarySpecType.isAssignableFrom(binarySpec.getClass())) {
            // Schema is looked up through the binary's public type here.
            VariantsMetaData binaryVariants = DefaultVariantsMetaData.extractFrom(binarySpec,
                    schemaStore.getSchema(((BinarySpecInternal) binarySpec).getPublicType()));
            // Only axes present on both sides take part in the comparison.
            Set<String> commonsDimensions = Sets.intersection(resolveDimensions,
                    binaryVariants.getNonNullVariantAxes());
            Set<String> incompatibleDimensionTypes = VariantsMetaDataHelper
                    .determineAxesWithIncompatibleTypes(variantsMetaData, binaryVariants, commonsDimensions);
            if (incompatibleDimensionTypes.isEmpty()) {
                for (String dimension : commonsDimensions) {
                    Class<?> dimensionType = variantsMetaData.getVariantAxisType(dimension).getConcreteClass();
                    boolean isStringType = String.class == dimensionType;
                    // An axis value is either a plain string or a Named instance.
                    Object requestedValue = isStringType ? variantsMetaData.getValueAsString(dimension)
                            : variantsMetaData.getValueAsType(
                                    Cast.<Class<? extends Named>>uncheckedCast(dimensionType), dimension);
                    Object binaryValue = isStringType ? binaryVariants.getValueAsString(dimension)
                            : binaryVariants.getValueAsType(
                                    Cast.<Class<? extends Named>>uncheckedCast(dimensionType), dimension);
                    VariantAxisCompatibility<Object> selector = createSelector(requestedValue);
                    if (selector.isCompatibleWithRequirement(requestedValue, binaryValue)) {
                        VariantValue value = new VariantValue(binaryValue, binarySpec);
                        SortedSet<VariantValue> variantValues = selectedVariants.get(dimension);
                        for (VariantValue variantValue : variantValues) {
                            // all the values are equal, but we store all the binaries that match that value
                            // and incrementally build a list of binaries which are excluded because of a better match
                            if (selector.betterFit(requestedValue, variantValue.value, binaryValue)) {
                                // the new value is a better fit than the old one
                                removedSpecs.add(variantValue.spec);
                            } else if (selector.betterFit(requestedValue, binaryValue, variantValue.value)) {
                                // the old value is a better fit than the new one, let's ignore the new one altogether
                                removedSpecs.add(value.spec);
                            }
                        }
                        selectedVariants.put(dimension, value);
                    } else {
                        removedSpecs.add(binarySpec);
                    }
                }
            }
        }
    }

    // Union of the binaries selected on any axis, minus those excluded
    // because a better match was found (or they were incompatible).
    Set<BinarySpec> union = null;
    for (String dimension : selectedVariants.keySet()) {
        Set<BinarySpec> variantValues = ImmutableSet
                .copyOf(Iterables.transform(selectedVariants.get(dimension), VariantValue.SPEC_FUNCTION));
        union = union == null ? variantValues : Sets.union(union, variantValues);
    }
    return union == null ? Collections.<BinarySpec>emptySet() : Sets.difference(union, removedSpecs);
}

From source file:org.apromore.toolbox.clustering.dissimilarity.algorithm.CanonicalGEDDeterministicGreedy.java

/**
 * Greedily computes an edit distance between the two canonical graphs by
 * repeatedly adding the node pair that lowers the mapping's edit distance the
 * most, preferring deterministic tie-breaking before a seeded random choice.
 *
 * @see CanonicalDistanceAlgorithm#compute(org.apromore.graph.canonical.Canonical, org.apromore.graph.canonical.Canonical)
 *      {@inheritDoc}
 */
@Override
public double compute(Canonical sg1, Canonical sg2) {
    init(sg1, sg2);

    GEDEdge couple;
    Vector<GEDEdge> bestCandidates;
    Set<GEDEdge> newMapping;
    Set<GEDEdge> newOpenCouples;
    Set<GEDEdge> mapping = new HashSet<>();
    Set<GEDEdge> openCouples = findCouples(sg1.getNodes(), sg2.getNodes());

    String tmp, label1, label2, contextkey, firstkey;
    double newEditDistance;
    double newShortestEditDistance;
    double shortestEditDistance = Double.MAX_VALUE;
    // Fixed seed keeps the random tie-break fallback reproducible across runs.
    Random randomized = new Random(123456789);

    TreeMultiset<String> mset;
    TreeMultimap<String, GEDEdge> tmap;
    TreeMultimap<String, GEDEdge> tmapp;

    boolean doStep = true;
    while (doStep) {
        doStep = false;
        bestCandidates = new Vector<>();
        newShortestEditDistance = shortestEditDistance;

        // Collect every open couple whose addition yields the (jointly)
        // smallest edit distance in this step.
        for (GEDEdge oCouple : openCouples) {
            newMapping = new HashSet<>(mapping);
            newMapping.add(oCouple);
            newEditDistance = this.editDistance(newMapping);
            LOGGER.debug("Couple Distance: " + newEditDistance + " - " + oCouple.getSource().getId() + " * "
                    + oCouple.getTarget().getId());

            if (newEditDistance < newShortestEditDistance) {
                bestCandidates = new Vector<>();
                bestCandidates.add(oCouple);
                newShortestEditDistance = newEditDistance;
            } else if (newEditDistance == newShortestEditDistance) {
                bestCandidates.add(oCouple);
            }
        }

        if (bestCandidates.size() > 0) {
            // Case 1: Only one candidate pair
            if (bestCandidates.size() == 1)
                couple = bestCandidates.firstElement();
            else {
                //  CASE 2: Lexicographical order is enough
                // (keys are the two names concatenated in sorted order;
                // TreeMultimap keeps keys sorted, so firstkey is smallest)
                tmap = TreeMultimap.create();
                for (GEDEdge pair : bestCandidates) {
                    label1 = pair.getSource().getName();
                    label2 = pair.getTarget().getName();
                    if (label1.compareTo(label2) > 0) {
                        tmp = label1;
                        label1 = label2;
                        label2 = tmp;
                    }
                    tmap.put(label1 + label2, pair);
                }
                firstkey = tmap.keySet().first();
                LOGGER.debug("firstkey: " + firstkey);

                if (tmap.get(firstkey).size() == 1) {
                    couple = tmap.get(firstkey).first();

                } else if (tmap.get(firstkey).size() > 1) {
                    // Extend each name with the sorted names of the node's
                    // direct predecessors and successors (its context).
                    tmapp = TreeMultimap.create();
                    mset = TreeMultiset.create();
                    for (GEDEdge pair : tmap.get(firstkey)) {
                        label1 = pair.getSource().getName();
                        mset.clear();
                        for (CPFNode n : sg1.getDirectPredecessors(pair.getSource())) {
                            mset.add(n.getName());
                        }
                        label1 += mset.toString();
                        mset.clear();
                        for (CPFNode n : sg1.getDirectSuccessors(pair.getSource())) {
                            mset.add(n.getName());
                        }
                        label1 += mset.toString();

                        label2 = pair.getTarget().getName();
                        mset.clear();
                        for (CPFNode n : sg2.getDirectPredecessors(pair.getTarget())) {
                            mset.add(n.getName());
                        }
                        label2 += mset.toString();
                        mset.clear();
                        for (CPFNode n : sg2.getDirectSuccessors(pair.getTarget())) {
                            mset.add(n.getName());
                        }
                        label2 += mset.toString();

                        if (label1.compareTo(label2) > 0) {
                            tmp = label1;
                            label1 = label2;
                            label2 = tmp;
                        }
                        tmapp.put(label1 + label2, pair);
                    }
                    contextkey = tmapp.keySet().first();

                    // CASE 3: Composite labels (concatenation of labels of nodes surrounding the target vertex)
                    if (tmapp.get(contextkey).size() == 1) {
                        couple = tmapp.get(contextkey).first();

                    } else {
                        // CASE 4: Non deterministic choice (Choose a random candidate)
                        deterministic = false;
                        couple = bestCandidates.get(randomized.nextInt(bestCandidates.size()));
                    }
                } else {
                    // CASE 5: Non deterministic choice (Choose a random candidate)
                    // Defensive branch: firstkey came from tmap, so its value
                    // set should never be empty.
                    deterministic = false;
                    couple = bestCandidates.get(randomized.nextInt(bestCandidates.size()));
                }
            }

            // Drop every open couple that shares a node with the chosen pair.
            newOpenCouples = new HashSet<>();
            for (GEDEdge p : openCouples) {
                if (!p.getSource().getId().equalsIgnoreCase(couple.getSource().getId())
                        && !p.getTarget().getId().equalsIgnoreCase(couple.getTarget().getId())) {
                    newOpenCouples.add(p);
                }
            }
            openCouples = newOpenCouples;
            LOGGER.debug("openCouples: " + openCouples.size());

            mapping.add(couple);
            shortestEditDistance = newShortestEditDistance;
            doStep = true;
        }
    }
    LOGGER.debug("Mappings: " + mapping.size());

    return shortestEditDistance;
}

From source file:com.facebook.buck.apple.AppleSdkDiscovery.java

/**
 * Given a path to an Xcode developer directory and a map of
 * (xctoolchain ID: path) pairs as returned by
 * {@link AppleToolchainDiscovery}, walks through the platforms
 * and builds a map of ({@link AppleSdk}: {@link AppleSdkPaths})
 * objects describing the paths to the SDKs inside.
 *
 * The {@link AppleSdk#getName()} strings match the ones displayed by {@code xcodebuild -showsdks}
 * and look like {@code macosx10.9}, {@code iphoneos8.0}, {@code iphonesimulator8.0},
 * etc.
 *
 * @param developerDir    optional Xcode developer directory to scan
 * @param extraDirs       additional platform directories to scan
 * @param xcodeToolchains toolchains keyed by xctoolchain identifier
 * @param appleConfig     configuration consulted while building SDK entries
 * @return map from each discovered SDK (including unversioned aliases) to its paths
 * @throws IOException if walking the platform directories fails
 */
public static ImmutableMap<AppleSdk, AppleSdkPaths> discoverAppleSdkPaths(Optional<Path> developerDir,
        ImmutableList<Path> extraDirs, ImmutableMap<String, AppleToolchain> xcodeToolchains,
        AppleConfig appleConfig) throws IOException {
    Optional<AppleToolchain> defaultToolchain = Optional.ofNullable(xcodeToolchains.get(DEFAULT_TOOLCHAIN_ID));

    ImmutableMap.Builder<AppleSdk, AppleSdkPaths> appleSdkPathsBuilder = ImmutableMap.builder();

    HashSet<Path> platformPaths = new HashSet<Path>(extraDirs);
    if (developerDir.isPresent()) {
        Path platformsDir = developerDir.get().resolve("Platforms");
        LOG.debug("Searching for Xcode platforms under %s", platformsDir);
        platformPaths.add(platformsDir);
    }

    // We need to find the most recent SDK for each platform so we can
    // make the fall-back SDKs with no version number in their name
    // ("macosx", "iphonesimulator", "iphoneos").
    //
    // To do this, we store a map of (platform: [sdk1, sdk2, ...])
    // pairs where the SDKs for each platform are ordered by version.
    TreeMultimap<ApplePlatform, AppleSdk> orderedSdksForPlatform = TreeMultimap.create(Ordering.natural(),
            APPLE_SDK_VERSION_ORDERING);

    for (Path platforms : platformPaths) {
        if (!Files.exists(platforms)) {
            LOG.debug("Skipping platform search path %s that does not exist", platforms);
            continue;
        }
        LOG.debug("Searching for Xcode SDKs in %s", platforms);

        try (DirectoryStream<Path> platformStream = Files.newDirectoryStream(platforms, "*.platform")) {
            for (Path platformDir : platformStream) {
                Path developerSdksPath = platformDir.resolve("Developer/SDKs");
                try (DirectoryStream<Path> sdkStream = Files.newDirectoryStream(developerSdksPath, "*.sdk")) {
                    Set<Path> scannedSdkDirs = new HashSet<>();
                    for (Path sdkDir : sdkStream) {
                        LOG.debug("Fetching SDK name for %s", sdkDir);

                        // Resolve symlinks so the same SDK reached via two
                        // names is only scanned once.
                        sdkDir = sdkDir.toRealPath();
                        if (scannedSdkDirs.contains(sdkDir)) {
                            LOG.debug("Skipping already scanned SDK directory %s", sdkDir);
                            continue;
                        }

                        AppleSdk.Builder sdkBuilder = AppleSdk.builder();
                        if (buildSdkFromPath(sdkDir, sdkBuilder, xcodeToolchains, defaultToolchain,
                                appleConfig)) {
                            AppleSdk sdk = sdkBuilder.build();
                            LOG.debug("Found SDK %s", sdk);

                            AppleSdkPaths.Builder xcodePathsBuilder = AppleSdkPaths.builder();
                            for (AppleToolchain toolchain : sdk.getToolchains()) {
                                xcodePathsBuilder.addToolchainPaths(toolchain.getPath());
                            }
                            AppleSdkPaths xcodePaths = xcodePathsBuilder.setDeveloperPath(developerDir)
                                    .setPlatformPath(platformDir).setSdkPath(sdkDir).build();
                            appleSdkPathsBuilder.put(sdk, xcodePaths);
                            orderedSdksForPlatform.put(sdk.getApplePlatform(), sdk);
                        }
                        scannedSdkDirs.add(sdkDir);
                    }
                } catch (NoSuchFileException e) {
                    // A platform without a Developer/SDKs directory is not
                    // fatal; log and keep scanning the remaining platforms.
                    LOG.warn(e, "Couldn't discover SDKs at path %s, ignoring platform %s", developerSdksPath,
                            platformDir);
                }
            }
        }
    }

    // Get a snapshot of what's in appleSdkPathsBuilder, then for each
    // ApplePlatform, add to appleSdkPathsBuilder the most recent
    // SDK with an unversioned name.
    ImmutableMap<AppleSdk, AppleSdkPaths> discoveredSdkPaths = appleSdkPathsBuilder.build();

    for (ApplePlatform platform : orderedSdksForPlatform.keySet()) {
        Set<AppleSdk> platformSdks = orderedSdksForPlatform.get(platform);
        // Skip the alias when some SDK already uses the bare platform name.
        boolean shouldCreateUnversionedSdk = true;
        for (AppleSdk sdk : platformSdks) {
            shouldCreateUnversionedSdk &= !sdk.getName().equals(platform.getName());
        }

        if (shouldCreateUnversionedSdk) {
            // last() is the most recent per the value comparator above.
            AppleSdk mostRecentSdkForPlatform = orderedSdksForPlatform.get(platform).last();
            appleSdkPathsBuilder.put(mostRecentSdkForPlatform.withName(platform.getName()),
                    discoveredSdkPaths.get(mostRecentSdkForPlatform));
        }
    }

    // This includes both the discovered SDKs with versions in their names, as well as
    // the unversioned aliases added just above.
    return appleSdkPathsBuilder.build();
}

From source file:com.splout.db.qnode.QNodeHandlerContext.java

/**
 * Rotates the versions (deletes versions that are old or useless). To be executed at startup and after a deployment.
 *//*from w  ww  . j  a  v a2 s. c  o  m*/
public List<com.splout.db.thrift.TablespaceVersion> synchronizeTablespaceVersions()
        throws InterruptedException {
    log.info("Starting to look for old tablespace versions to remove...");

    int maxVersionsPerTablespace = config.getInt(QNodeProperties.VERSIONS_PER_TABLESPACE);

    // Will contain the list of versions per each tablespace, sorted by creation date descendant
    TreeMultimap<String, Tablespace> tablespaces = TreeMultimap.create(Ordering.natural(),
            new Comparator<Tablespace>() {
                @Override
                public int compare(Tablespace tb1, Tablespace tb2) {
                    // reverse ordering. Older dates appears LAST. If same date, then version is compared.
                    int comp = -((Long) tb1.getCreationDate()).compareTo(tb2.getCreationDate());
                    if (comp == 0) {
                        return -((Long) tb1.getVersion()).compareTo(tb2.getVersion());
                    } else {
                        return comp;
                    }
                }
            });

    Map<TablespaceVersion, Tablespace> myTablespaces = getTablespaceVersionsMap();

    // We build a in memory version of tablespaces for analyzing it
    // and prune old ones.
    for (Entry<TablespaceVersion, Tablespace> entry : myTablespaces.entrySet()) {
        tablespaces.put(entry.getKey().getTablespace(), entry.getValue());
    }
    log.info("Analyzing " + tablespaces.keySet().size() + " tablespaces with a total of " + tablespaces.size()
            + " versions...");

    // We will remove only versions older than the one being served
    Map<String, Long> hzVersionsBeingServed = coordinationStructures.getCopyVersionsBeingServed();
    if (hzVersionsBeingServed == null) {
        log.info("... No versions yet being served.");
        return null; // nothing to do yet
    }
    log.info("Number of versions being served: " + hzVersionsBeingServed.size());

    List<com.splout.db.thrift.TablespaceVersion> tablespacesToRemove = new ArrayList<com.splout.db.thrift.TablespaceVersion>();

    for (Entry<String, Long> entry : hzVersionsBeingServed.entrySet()) {
        String tablespace = entry.getKey();
        Long versionBeingServed = entry.getValue();
        // Tablespaces are sorted by creation date desc.
        SortedSet<Tablespace> allVersions = tablespaces.get(tablespace);
        Iterator<Tablespace> it = allVersions.iterator();
        boolean foundVersionBeingServed = false;
        int countVersionsAfter = 0;
        while (it.hasNext()) {
            Tablespace tb = it.next();
            if (versionBeingServed.equals(tb.getVersion())) {
                foundVersionBeingServed = true;
            } else {
                if (foundVersionBeingServed) {
                    countVersionsAfter++;
                    if (countVersionsAfter >= maxVersionsPerTablespace) {
                        // This is the case where we remove the version
                        // 1 - This tablespace has a version being served
                        // 2 - This version is older than the current tablespace being served
                        // 3 - We are already keeping maxVersionsPerTablespace versions
                        tablespacesToRemove
                                .add(new com.splout.db.thrift.TablespaceVersion(tablespace, tb.getVersion()));
                        log.info("Tablespace [" + tablespace + "] Version [" + tb.getVersion() + "] "
                                + "created at [" + new Date(tb.getCreationDate())
                                + "] REMOVED. We already keep younger versions.");
                    } else {
                        log.info("Tablespace [" + tablespace + "] Version [" + tb.getVersion() + "] "
                                + "created at [" + new Date(tb.getCreationDate()) + "] KEPT.");
                    }
                } else {
                    log.info("Tablespace [" + tablespace + "] Version [" + tb.getVersion() + "] "
                            + "created at [" + new Date(tb.getCreationDate())
                            + "] either younger than served one or without version being served. Keeping.");
                }
            }
        }

        if (!foundVersionBeingServed) {
            log.info("Tablespace [" + tablespace
                    + "] without any version being served. Please, have a look, and remove them if not used");
        }

        if (tablespacesToRemove.size() > 0) {
            log.info("Sending [" + tablespacesToRemove + "] to all alive DNodes.");
            for (DNodeInfo dnode : coordinationStructures.getDNodes().values()) {
                DNodeService.Client client = null;
                boolean renew = false;
                try {
                    client = getDNodeClientFromPool(dnode.getAddress());
                    client.deleteOldVersions(tablespacesToRemove);
                } catch (TTransportException e) {
                    renew = true;
                    log.warn("Failed sending delete TablespaceVersions order to (" + dnode
                            + "). Not critical as they will be removed after other deployments.", e);
                } catch (Exception e) {
                    log.warn("Failed sending delete TablespaceVersions order to (" + dnode
                            + "). Not critical as they will be removed after other deployments.", e);
                } finally {
                    if (client != null) {
                        returnDNodeClientToPool(dnode.getAddress(), client, renew);
                    }
                }
            }
        }
        log.info("... done looking for old tablespace versions to remove...");
    }

    return tablespacesToRemove; // Return for unit test
}

From source file:com.facebook.buck.apple.toolchain.impl.AppleSdkDiscovery.java

/**
 * Given a path to an Xcode developer directory and a map of (xctoolchain ID: path) pairs as
 * returned by {@link AppleToolchainDiscovery}, walks through the platforms and builds a map of
 * ({@link AppleSdk}: {@link AppleSdkPaths}) objects describing the paths to the SDKs inside.
 *
 * <p>The {@link AppleSdk#getName()} strings match the ones displayed by {@code xcodebuild
 * -showsdks} and look like {@code macosx10.9}, {@code iphoneos8.0}, {@code iphonesimulator8.0},
 * etc.
 *
 * @param developerDir optional Xcode developer directory whose {@code Platforms} subdir is scanned.
 * @param extraDirs additional directories to scan for {@code *.platform} entries.
 * @param xcodeToolchains map of xctoolchain IDs to discovered toolchains.
 * @param appleConfig buck's Apple-specific configuration.
 * @return an immutable map of each discovered SDK (plus unversioned aliases) to its paths.
 * @throws IOException if a platform directory cannot be listed.
 */
public static ImmutableMap<AppleSdk, AppleSdkPaths> discoverAppleSdkPaths(Optional<Path> developerDir,
        ImmutableList<Path> extraDirs, ImmutableMap<String, AppleToolchain> xcodeToolchains,
        AppleConfig appleConfig) throws IOException {
    Optional<AppleToolchain> defaultToolchain = Optional.ofNullable(xcodeToolchains.get(DEFAULT_TOOLCHAIN_ID));

    ImmutableMap.Builder<AppleSdk, AppleSdkPaths> appleSdkPathsBuilder = ImmutableMap.builder();

    // Candidate directories that may contain *.platform entries.
    HashSet<Path> platformPaths = new HashSet<>(extraDirs);
    if (developerDir.isPresent()) {
        Path platformsDir = developerDir.get().resolve("Platforms");
        LOG.debug("Searching for Xcode platforms under %s", platformsDir);
        platformPaths.add(platformsDir);
    }

    // We need to find the most recent SDK for each platform so we can
    // make the fall-back SDKs with no version number in their name
    // ("macosx", "iphonesimulator", "iphoneos").
    //
    // To do this, we store a map of (platform: [sdk1, sdk2, ...])
    // pairs where the SDKs for each platform are ordered by version.
    TreeMultimap<ApplePlatform, AppleSdk> orderedSdksForPlatform = TreeMultimap.create(Ordering.natural(),
            APPLE_SDK_VERSION_ORDERING);

    for (Path platforms : platformPaths) {
        if (!Files.exists(platforms)) {
            LOG.debug("Skipping platform search path %s that does not exist", platforms);
            continue;
        }
        LOG.debug("Searching for Xcode SDKs in %s", platforms);

        try (DirectoryStream<Path> platformStream = Files.newDirectoryStream(platforms, "*.platform")) {
            for (Path platformDir : platformStream) {
                Path developerSdksPath = platformDir.resolve("Developer/SDKs");
                try (DirectoryStream<Path> sdkStream = Files.newDirectoryStream(developerSdksPath, "*.sdk")) {
                    // Tracks resolved SDK paths already processed so symlinked
                    // duplicates (e.g. "MacOSX.sdk" -> "MacOSX10.9.sdk") are scanned once.
                    Set<Path> scannedSdkDirs = new HashSet<>();
                    for (Path sdkDir : sdkStream) {
                        LOG.debug("Fetching SDK name for %s", sdkDir);

                        try {
                            sdkDir = sdkDir.toRealPath();
                        } catch (NoSuchFileException e) {
                            LOG.warn(e, "SDK at path %s is a dangling link, ignoring", sdkDir);
                            continue;
                        }
                        // Set.add returns false when the real path was already seen.
                        if (!scannedSdkDirs.add(sdkDir)) {
                            LOG.debug("Skipping already scanned SDK directory %s", sdkDir);
                            continue;
                        }

                        AppleSdk.Builder sdkBuilder = AppleSdk.builder();
                        if (buildSdkFromPath(sdkDir, sdkBuilder, xcodeToolchains, defaultToolchain,
                                appleConfig)) {
                            AppleSdk sdk = sdkBuilder.build();
                            LOG.debug("Found SDK %s", sdk);

                            AppleSdkPaths.Builder xcodePathsBuilder = AppleSdkPaths.builder();
                            for (AppleToolchain toolchain : sdk.getToolchains()) {
                                xcodePathsBuilder.addToolchainPaths(toolchain.getPath());
                            }
                            AppleSdkPaths xcodePaths = xcodePathsBuilder.setDeveloperPath(developerDir)
                                    .setPlatformPath(platformDir).setSdkPath(sdkDir).build();
                            appleSdkPathsBuilder.put(sdk, xcodePaths);
                            orderedSdksForPlatform.put(sdk.getApplePlatform(), sdk);
                        }
                    }
                } catch (NoSuchFileException e) {
                    // A platform without a Developer/SDKs directory is not an error.
                    LOG.warn(e, "Couldn't discover SDKs at path %s, ignoring platform %s", developerSdksPath,
                            platformDir);
                }
            }
        }
    }

    // Get a snapshot of what's in appleSdkPathsBuilder, then for each
    // ApplePlatform, add to appleSdkPathsBuilder the most recent
    // SDK with an unversioned name.
    ImmutableMap<AppleSdk, AppleSdkPaths> discoveredSdkPaths = appleSdkPathsBuilder.build();

    for (ApplePlatform platform : orderedSdksForPlatform.keySet()) {
        Set<AppleSdk> platformSdks = orderedSdksForPlatform.get(platform);
        // Only alias when no discovered SDK already uses the bare platform name
        // (e.g. an SDK literally named "macosx"); break as soon as one is found.
        boolean shouldCreateUnversionedSdk = true;
        for (AppleSdk sdk : platformSdks) {
            if (sdk.getName().equals(platform.getName())) {
                shouldCreateUnversionedSdk = false;
                break;
            }
        }

        if (shouldCreateUnversionedSdk) {
            // The multimap values are ordered by APPLE_SDK_VERSION_ORDERING,
            // so the most recent SDK for the platform is last.
            AppleSdk mostRecentSdkForPlatform = orderedSdksForPlatform.get(platform).last();
            appleSdkPathsBuilder.put(mostRecentSdkForPlatform.withName(platform.getName()),
                    discoveredSdkPaths.get(mostRecentSdkForPlatform));
        }
    }

    // This includes both the discovered SDKs with versions in their names, as well as
    // the unversioned aliases added just above.
    return appleSdkPathsBuilder.build();
}