List of usage examples for com.google.common.collect SetMultimap keySet
Set<K> keySet();
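A minimal, self-contained sketch of what keySet() returns: a live Set view containing each distinct key once, no matter how many values the key maps to. (This demo class is illustrative, not from any of the projects below.)

import com.google.common.collect.HashMultimap;
import com.google.common.collect.SetMultimap;

public class KeySetDemo {
    public static void main(String[] args) {
        SetMultimap<String, Integer> scores = HashMultimap.create();
        scores.put("alice", 1);
        scores.put("alice", 2);
        scores.put("bob", 3);
        // one entry per distinct key, not per mapping (iteration order unspecified for HashMultimap)
        System.out.println(scores.keySet()); // e.g. [bob, alice]
        // the view is live: removing a key removes all of its mappings
        scores.keySet().remove("alice");
        System.out.println(scores); // {bob=[3]}
    }
}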
From source file:org.apache.flume.channel.file.EventQueueBackingStoreFile.java
public static void main(String[] args) throws Exception {
    File file = new File(args[0]);
    File inflightTakesFile = new File(args[1]);
    File inflightPutsFile = new File(args[2]);
    if (!file.exists()) {
        throw new IOException("File " + file + " does not exist");
    }
    if (file.length() == 0) {
        throw new IOException("File " + file + " is empty");
    }
    int capacity = (int) ((file.length() - (HEADER_SIZE * 8L)) / 8L);
    EventQueueBackingStoreFile backingStore = (EventQueueBackingStoreFile) EventQueueBackingStoreFactory
            .get(file, capacity, "debug", false);
    System.out.println("File Reference Counts" + backingStore.logFileIDReferenceCounts);
    System.out.println("Queue Capacity " + backingStore.getCapacity());
    System.out.println("Queue Size " + backingStore.getSize());
    System.out.println("Queue Head " + backingStore.getHead());
    for (int index = 0; index < backingStore.getCapacity(); index++) {
        long value = backingStore.get(backingStore.getPhysicalIndex(index));
        int fileID = (int) (value >>> 32);
        int offset = (int) value;
        System.out.println(
                index + ":" + Long.toHexString(value) + " fileID = " + fileID + ", offset = " + offset);
    }
    FlumeEventQueue queue = new FlumeEventQueue(backingStore, inflightTakesFile, inflightPutsFile);
    SetMultimap<Long, Long> putMap = queue.deserializeInflightPuts();
    System.out.println("Inflight Puts:");
    for (Long txnID : putMap.keySet()) {
        Set<Long> puts = putMap.get(txnID);
        System.out.println("Transaction ID: " + String.valueOf(txnID));
        for (long value : puts) {
            int fileID = (int) (value >>> 32);
            int offset = (int) value;
            System.out.println(Long.toHexString(value) + " fileID = " + fileID + ", offset = " + offset);
        }
    }
    SetMultimap<Long, Long> takeMap = queue.deserializeInflightTakes();
    System.out.println("Inflight takes:");
    for (Long txnID : takeMap.keySet()) {
        Set<Long> takes = takeMap.get(txnID);
        System.out.println("Transaction ID: " + String.valueOf(txnID));
        for (long value : takes) {
            int fileID = (int) (value >>> 32);
            int offset = (int) value;
            System.out.println(Long.toHexString(value) + " fileID = " + fileID + ", offset = " + offset);
        }
    }
}
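The dump above repeatedly splits each stored long into a file ID (high 32 bits) and an offset (low 32 bits). A minimal sketch of the packing this implies; the encode direction is an assumption, since the Flume code shown only decodes:

int fileID = 7;
int offset = 1024;
// pack: high word = fileID, low word = offset (mask keeps negative offsets from sign-extending)
long slot = ((long) fileID << 32) | (offset & 0xFFFFFFFFL);
// unpack, exactly as the dump loop does
assert (int) (slot >>> 32) == fileID;
assert (int) slot == offset;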
From source file:org.jboss.weld.util.collections.ArraySetSupplier.java
/**
 * Helper method which will trim each set in the multimap to its current size.
 *
 * @param <K> Key type
 * @param <V> Value type
 * @param multimap the set multimap using ArraySet<V> as the values
 */
public static <K, V> void trimSetsToSize(SetMultimap<K, V> multimap) {
    for (K key : multimap.keySet()) {
        if (multimap.get(key) instanceof ArraySet<?>) {
            ((ArraySet<?>) multimap.get(key)).trimToSize();
        }
    }
}
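A usage sketch, assuming ArraySetSupplier supplies ArraySet instances to Guava's Multimaps.newSetMultimap; the instance() factory below is an assumption about this class's API, not confirmed by the excerpt:

// assumed wiring: value sets created by ArraySetSupplier are ArraySets
SetMultimap<String, String> multimap = Multimaps.newSetMultimap(
        new HashMap<String, Collection<String>>(), ArraySetSupplier.<String>instance());
multimap.put("beans", "requestScoped");
multimap.put("beans", "sessionScoped");
// once population is done, compact each backing ArraySet
ArraySetSupplier.trimSetsToSize(multimap);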
From source file:org.apache.gobblin.data.management.copy.hive.WhitelistBlacklist.java
private static boolean multimapContains(SetMultimap<Pattern, Pattern> multimap, String database,
        Optional<String> table, boolean blacklist) {
    for (Pattern dbPattern : multimap.keySet()) {
        if (dbPattern.matcher(database).matches()) {
            if (!table.isPresent()) {
                // if we are only matching database
                return !blacklist || multimap.get(dbPattern).contains(ALL_TABLES);
            }
            for (Pattern tablePattern : multimap.get(dbPattern)) {
                if (tablePattern.matcher(table.get()).matches()) {
                    return true;
                }
            }
        }
    }
    return false;
}
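A hypothetical invocation, assuming the patterns were compiled elsewhere in WhitelistBlacklist; the database and table names here are made up for illustration:

SetMultimap<Pattern, Pattern> whitelist = HashMultimap.create();
whitelist.put(Pattern.compile("mydb"), Pattern.compile("events_.*"));
// table lookup: "mydb" matches and has a table pattern matching "events_2020"
boolean hit = multimapContains(whitelist, "mydb", Optional.of("events_2020"), false); // true
// database-only lookup against a whitelist (blacklist == false) matches any listed db
boolean dbOnly = multimapContains(whitelist, "mydb", Optional.<String>absent(), false); // true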
From source file:com.wrmsr.wava.basic.BasicLoopInfo.java
public static Map<Name, Name> getLoopParents(SetMultimap<Name, Name> loopContents) {
    Map<Name, Name> loopParents = new HashMap<>();
    Map<Name, Set<Name>> map = loopContents.keySet().stream()
            .collect(toHashMap(identity(), loop -> new HashSet<>()));
    for (Name cur : loopContents.keySet()) {
        map.get(cur).add(ENTRY_NAME);
        Set<Name> children = loopContents.get(cur);
        for (Name child : children) {
            if (!cur.equals(child) && loopContents.containsKey(child)) {
                map.get(child).add(cur);
            }
        }
    }
    Map<Name, Integer> loopDepths = map.entrySet().stream()
            .collect(toHashMap(entry -> entry.getKey(), entry -> entry.getValue().size()));
    loopDepths.put(ENTRY_NAME, 0);
    int maxDepth = loopDepths.values().stream().mapToInt(Integer::intValue).max().orElse(0);
    List<List<Name>> depthLoopsLists = IntStream.range(0, maxDepth + 1).boxed()
            .<List<Name>>map(i -> new ArrayList<>()).collect(toArrayList());
    loopDepths.forEach((loop, depth) -> depthLoopsLists.get(depth).add(loop));
    Set<Name> seen = new HashSet<>();
    for (int depth = 1; depth < depthLoopsLists.size(); ++depth) {
        for (Name loop : depthLoopsLists.get(depth)) {
            Name parent = getOnlyElement(Sets.difference(map.get(loop), seen));
            checkState(loopDepths.get(parent) == depth - 1);
            loopParents.put(loop, parent);
        }
        seen.addAll(depthLoopsLists.get(depth - 1));
    }
    checkState(loopContents.keySet().equals(loopParents.keySet()));
    return loopParents;
}
From source file:com.google.caliper.core.BenchmarkClassModel.java
/**
 * Validates the given user-provided parameters against the parameter fields on the benchmark
 * class.
 */
public static void validateUserParameters(Class<?> clazz, SetMultimap<String, String> userParameters) {
    for (String paramName : userParameters.keySet()) {
        try {
            Field field = clazz.getDeclaredField(paramName);
            Parameters.validate(field, userParameters.get(paramName));
        } catch (NoSuchFieldException e) {
            throw new InvalidCommandException("unrecognized parameter: " + paramName);
        } catch (InvalidBenchmarkException e) {
            // TODO(kevinb): this is weird.
            throw new InvalidCommandException(e.getMessage());
        }
    }
}
From source file:com.addthis.hydra.query.MeshFileRefCache.java
/**
 * This method filters the file references to ensure that only valid file references are returned.
 * <p/>
 * The filter checks for two things.
 * <p/>
 * <ol>
 * <li>the last modified date for each file for the same task should be the same, if not it will take the
 * newest file</li>
 * <li>the size of the files should be equal, if not, take the files with the largest known size</li>
 * </ol>
 *
 * @param fileRefDataSet - the original unfiltered file reference set
 * @return - filtered file reference map containing only valid file references
 */
@Nonnull
protected static SetMultimap<Integer, FileReference> filterFileReferences(
        @Nonnull SetMultimap<Integer, FileReference> fileRefDataSet) {
    if (fileRefDataSet.isEmpty()) {
        return fileRefDataSet;
    }
    int baseKeySetSize = fileRefDataSet.keySet().size();
    SetMultimap<Integer, FileReference> filteredFileReferenceSet = HashMultimap.create(baseKeySetSize,
            fileRefDataSet.size() / baseKeySetSize);
    for (Map.Entry<Integer, Collection<FileReference>> entry : fileRefDataSet.asMap().entrySet()) {
        int key = entry.getKey();
        final Collection<FileReference> fileReferences = entry.getValue();
        long mostRecentTime = -1;
        for (FileReference fileReference : fileReferences) {
            if ((mostRecentTime < 0) || (fileReference.lastModified > mostRecentTime)) {
                mostRecentTime = fileReference.lastModified;
            }
        }
        final long mostRecentTimeF = mostRecentTime;
        Predicate<FileReference> isMostRecent = input -> (input != null)
                && (input.lastModified == mostRecentTimeF);
        Collection<FileReference> filteredFileReferences = Collections2.filter(fileReferences, isMostRecent);
        filteredFileReferenceSet.putAll(key, filteredFileReferences);
    }
    return filteredFileReferenceSet;
}
From source file:com.google.gerrit.server.index.change.StalenessChecker.java
@VisibleForTesting
static boolean refsAreStale(GitRepositoryManager repoManager, Change.Id id,
        SetMultimap<Project.NameKey, RefState> states,
        ListMultimap<Project.NameKey, RefStatePattern> patterns) {
    Set<Project.NameKey> projects = Sets.union(states.keySet(), patterns.keySet());
    for (Project.NameKey p : projects) {
        if (refsAreStale(repoManager, id, p, states, patterns)) {
            return true;
        }
    }
    return false;
}
From source file:eu.esdihumboldt.hale.common.align.tgraph.impl.internal.TGraphFactory.java
/**
 * Create a transformation graph from a transformation tree.
 *
 * @param ttree the transformation tree
 * @param functionService the function service
 * @return an in-memory graph created from the transformation tree
 */
public static Graph create(TransformationTree ttree, FunctionService functionService) {
    TreeToGraphVisitor graphVisitor = new TreeToGraphVisitor(functionService);
    ttree.accept(graphVisitor);

    SetMultimap<String, String> connections = graphVisitor.getAllConnections();
    Set<String> ids = graphVisitor.getAllIds();

    Graph graph = new TinkerGraph();

    // add nodes to the graph
    for (String key : ids) {
        // create a vertex for each transformation node
        TransformationNode node = graphVisitor.getNode(key);
        Vertex vertex = graph.addVertex(key);
        setVertexProperties(vertex, node);
    }

    for (String key : connections.keySet()) {
        for (String value : connections.get(key)) {
            Vertex targetSide = graph.getVertex(key);
            Vertex sourceSide = graph.getVertex(value);
            TransformationNode targetSideNode = graphVisitor.getNode(key);
            TransformationNode sourceSideNode = graphVisitor.getNode(value);

            String edgeLabel;
            if (sourceSideNode instanceof SourceNode && targetSideNode instanceof SourceNode) {
                edgeLabel = EDGE_CHILD;
            } else if (sourceSideNode instanceof SourceNode && targetSideNode instanceof CellNode) {
                edgeLabel = EDGE_VARIABLE;
            } else if (sourceSideNode instanceof CellNode && targetSideNode instanceof GroupNode) {
                edgeLabel = EDGE_RESULT;
            } else if (sourceSideNode instanceof GroupNode && targetSideNode instanceof GroupNode) {
                edgeLabel = EDGE_PARENT;
            } else {
                throw new IllegalStateException("Invalid relation in transformation tree");
            }

            Edge edge = graph.addEdge(null, sourceSide, targetSide, edgeLabel);
            setEdgeProperties(edge, sourceSideNode, targetSideNode);
        }
    }
    return graph;
}
From source file:eu.esdihumboldt.hale.ui.style.StyleHelper.java
/**
 * Returns a default style for the given type.
 *
 * @param dataSetTypes type definitions associated to their data set
 * @return the style
 */
public static Style getSpectrumStyles(SetMultimap<DataSet, TypeDefinition> dataSetTypes) {
    int defWidth = StylePreferences.getDefaultWidth();
    Style style = styleFactory.createStyle();

    GeometrySchemaService gss = PlatformUI.getWorkbench().getService(GeometrySchemaService.class);

    for (DataSet dataSet : dataSetTypes.keySet()) {
        float saturation;
        float brightness;
        switch (dataSet) {
        case TRANSFORMED:
            saturation = 0.8f;
            brightness = 0.6f;
            break;
        case SOURCE:
        default:
            saturation = 0.75f;
            brightness = 0.8f;
            break;
        }

        Set<TypeDefinition> types = new HashSet<>(dataSetTypes.get(dataSet));
        Iterator<TypeDefinition> it = types.iterator();
        while (it.hasNext()) {
            TypeDefinition type = it.next();
            // remove invalid types
            if (type.getConstraint(AbstractFlag.class).isEnabled() || gss.getDefaultGeometry(type) == null) {
                it.remove();
            }
        }

        int numberOfTypes = types.size();
        int index = 0;
        for (TypeDefinition typeDef : types) {
            FeatureTypeStyle fts;
            // TODO based on default geometry?
            // polygon is always OK as it contains stroke and fill

            // Color color = generateRandomColor(Color.WHITE);
            Color color;
            if (numberOfTypes == 1) {
                color = generateRandomColor(saturation, brightness);
            } else {
                color = Color.getHSBColor((float) index / (float) numberOfTypes, saturation, brightness);
            }
            fts = createPolygonStyle(color, defWidth);
            fts.featureTypeNames().add(new NameImpl(getFeatureTypeName(typeDef)));
            style.featureTypeStyles().add(fts);

            index++;
        }
    }

    return style;
}
From source file:org.apache.kylin.source.hive.HiveSourceTableLoader.java
public static Set<String> loadHiveTables(String[] hiveTables, KylinConfig config) throws IOException {
    SetMultimap<String, String> db2tables = LinkedHashMultimap.create();
    for (String fullTableName : hiveTables) {
        String[] parts = HadoopUtil.parseHiveTableName(fullTableName);
        db2tables.put(parts[0], parts[1]);
    }

    IHiveClient hiveClient = HiveClientFactory.getHiveClient();
    SchemaChecker checker = new SchemaChecker(hiveClient, MetadataManager.getInstance(config),
            CubeManager.getInstance(config));
    for (Map.Entry<String, String> entry : db2tables.entries()) {
        SchemaChecker.CheckResult result = checker.allowReload(entry.getKey(), entry.getValue());
        result.raiseExceptionWhenInvalid();
    }

    // extract from hive
    Set<String> loadedTables = Sets.newHashSet();
    for (String database : db2tables.keySet()) {
        List<String> loaded = extractHiveTables(database, db2tables.get(database), hiveClient);
        loadedTables.addAll(loaded);
    }

    return loadedTables;
}
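The LinkedHashMultimap grouping above is a reusable pattern on its own: collect "db.table" names into a SetMultimap, then use keySet() to process one database at a time. A minimal standalone sketch with made-up table names, using only Guava:

SetMultimap<String, String> db2tables = LinkedHashMultimap.create();
for (String fullName : new String[] { "sales.orders", "sales.items", "hr.people" }) {
    String[] parts = fullName.split("\\.", 2);
    db2tables.put(parts[0], parts[1]);
}
for (String db : db2tables.keySet()) {
    // one pass per distinct database; values are grouped and de-duplicated
    System.out.println(db + " -> " + db2tables.get(db)); // sales -> [orders, items], hr -> [people]
}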