List of usage examples for com.google.common.collect HashBiMap put
@Override
public V put(@Nullable K key, @Nullable V value)
From source file:org.killbill.billing.plugin.meter.timeline.persistent.DefaultTimelineDao.java
/**
 * Loads every source known for the given tenant as a bidirectional map.
 *
 * @param context the tenant whose sources should be loaded
 * @return bimap of source record id to source name
 * @throws UnableToObtainConnectionException propagated from the underlying DAO
 * @throws CallbackFailedException propagated from the underlying DAO
 */
@Override
public BiMap<Integer, String> getSources(final TenantContext context)
        throws UnableToObtainConnectionException, CallbackFailedException {
    final HashBiMap<Integer, String> sourcesById = HashBiMap.create();
    for (final Map<String, Object> row : delegate.getSources(createInternalTenantContext(context))) {
        final Integer recordId = Integer.valueOf(row.get("record_id").toString());
        sourcesById.put(recordId, row.get("source").toString());
    }
    return sourcesById;
}
From source file:org.killbill.billing.plugin.meter.timeline.persistent.DefaultTimelineDao.java
@Override public BiMap<Integer, CategoryRecordIdAndMetric> getMetrics(final TenantContext context) throws UnableToObtainConnectionException, CallbackFailedException { final HashBiMap<Integer, CategoryRecordIdAndMetric> accumulator = HashBiMap.create(); for (final Map<String, Object> metricInfo : delegate.getMetrics(createInternalTenantContext(context))) { accumulator.put(Integer.valueOf(metricInfo.get("record_id").toString()), new CategoryRecordIdAndMetric( (Integer) metricInfo.get("category_record_id"), metricInfo.get("metric").toString())); }//from w w w . j a v a2s. co m return accumulator; }
From source file:org.killbill.billing.plugin.meter.timeline.persistent.DefaultTimelineDao.java
/**
 * Loads every event category known for the given tenant as a bidirectional map.
 *
 * @param context the tenant whose categories should be loaded
 * @return bimap of category record id to category name
 * @throws UnableToObtainConnectionException propagated from the underlying DAO
 * @throws CallbackFailedException propagated from the underlying DAO
 */
@Override
public BiMap<Integer, String> getEventCategories(final TenantContext context)
        throws UnableToObtainConnectionException, CallbackFailedException {
    final HashBiMap<Integer, String> categoriesById = HashBiMap.create();
    for (final Map<String, Object> row : delegate.getCategories(createInternalTenantContext(context))) {
        final Integer recordId = Integer.valueOf(row.get("record_id").toString());
        categoriesById.put(recordId, row.get("category").toString());
    }
    return categoriesById;
}
From source file:org.onos.yangtools.sal.binding.generator.util.BindingRuntimeContext.java
private BiMap<String, String> getEnumMapping(final Entry<GeneratedType, Object> typeWithSchema) { final TypeDefinition<?> typeDef = (TypeDefinition<?>) typeWithSchema.getValue(); final EnumerationType enumType; if (typeDef instanceof ExtendedType) { enumType = (EnumerationType) ((ExtendedType) typeDef).getBaseType(); } else {//from w ww . ja va 2 s . c o m Preconditions.checkArgument(typeDef instanceof EnumerationType); enumType = (EnumerationType) typeDef; } final HashBiMap<String, String> mappedEnums = HashBiMap.create(); for (final EnumTypeDefinition.EnumPair enumPair : enumType.getValues()) { mappedEnums.put(enumPair.getName(), BindingMapping.getClassName(enumPair.getName())); } // TODO cache these maps for future use return mappedEnums; }
From source file:ccm.nucleumOmnium.recipeStuff.ShapedOreRecipeType.java
/**
 * Serializes a ShapedOreRecipe into an NBTTagCompound: reads the recipe's width,
 * height and mirror flag reflectively (via the ShapedOreRecipe_width/_height/_mirror
 * Field handles), assigns each distinct ingredient a character from DUMMY_CHARS
 * (ore-dictionary ArrayList inputs are stored by their ore name, tracked in
 * arrayListMap), writes the shape as one string per row under NBT_input, the
 * char-to-ingredient table under NBT_map (String or ItemStack values only; anything
 * else is logged as an error), the output stack under NBT_output and the mirror
 * flag under NBT_mirror.
 *
 * NOTE(review): chars[h][w] = map.inverse().get(...) auto-unboxes a Character and
 * will NPE if the lookup misses (e.g. an oredict input whose ore name was never
 * mapped because DUMMY_CHARS ran out or no name matched) -- confirm inputs are
 * always mapped.
 *
 * NOTE(review): original line breaks were lost during extraction; the inline "//"
 * comments swallow the code following them on the same physical line, so this text
 * must be re-flowed before it compiles.
 */
@Override public NBTTagCompound getNBTFromRecipe(ShapedOreRecipe recipe, ItemStack newOutput) throws IllegalAccessException { NBTTagCompound nbtRecipe = new NBTTagCompound(); NBTTagList NBTInput = new NBTTagList(); int width = ShapedOreRecipe_width.getInt(recipe); int height = ShapedOreRecipe_height.getInt(recipe); /**// w w w . ja v a 2 s . c o m * Build a map to convert the object array into recipe format. */ HashBiMap<Character, Object> map = HashBiMap.create(); HashMap<ArrayList, Object> arrayListMap = new HashMap<ArrayList, Object>(); // Lookup map for oredict entries. for (Object o : recipe.getInput()) { if (o == null) continue; if (map.containsValue(o)) continue; if (o instanceof ArrayList) { for (String name : OreDictionary.getOreNames()) { if (OreDictionary.getOres(name).equals(o)) { if (map.containsValue(name)) break; map.put(DUMMY_CHARS.charAt(map.size()), name); arrayListMap.put((ArrayList) o, name); break; } } } else { map.put(DUMMY_CHARS.charAt(map.size()), o); } } /** * Make the recipe strings * aka: "aa ", "aa ", "aa " */ char[][] chars = new char[height][width]; for (int h = 0; h < height; h++) { for (int w = 0; w < width; w++) { int i = h * width + w; if (recipe.getInput()[i] == null) chars[h][w] = ' '; else if (recipe.getInput()[i] instanceof ArrayList) //noinspection SuspiciousMethodCalls chars[h][w] = map.inverse().get(arrayListMap.get(recipe.getInput()[i])); else chars[h][w] = map.inverse().get(recipe.getInput()[i]); } String line = new String(chars[h]); NBTInput.appendTag(new NBTTagString(null, line)); } nbtRecipe.setTag(NBT_input, NBTInput); /** * Add the char to itemstack thing * aka: 'a' = "plank" */ NBTTagCompound nbtMap = new NBTTagCompound(); for (Map.Entry<Character, Object> entry : map.entrySet()) { if (entry.getValue() instanceof String) nbtMap.setString(entry.getKey().toString(), entry.getValue().toString()); else if (entry.getValue() instanceof ItemStack) nbtMap.setCompoundTag(entry.getKey().toString(), ((ItemStack) 
entry.getValue()).writeToNBT(new NBTTagCompound())); else { NucleumOmnium.getLogger().severe("[OreDictionaryFixes] NBT RECIPE ERROR: " + entry.getValue() + " IS NOT STRING OR ITEMSTACK ???"); } } nbtRecipe.setCompoundTag(NBT_map, nbtMap); nbtRecipe.setCompoundTag(NBT_output, newOutput.writeToNBT(new NBTTagCompound())); nbtRecipe.setBoolean(NBT_mirror, ShapedOreRecipe_mirror.getBoolean(recipe)); return nbtRecipe; }
From source file:ccm.craycrafting.recipes.ShapedOreRecipeType.java
/**
 * Serializes a ShapedOreRecipe into an NBTTagCompound: width/height/mirror are read
 * reflectively, each distinct ingredient is keyed by a character from DUMMY_CHARS
 * (oredict ArrayList inputs stored by ore name via arrayListMap), the shape rows go
 * under NBT_input, the char-to-ingredient table under NBT_map, the output under
 * NBT_output and the mirror flag under NBT_mirror.
 *
 * NOTE(review): this is a near-identical copy of
 * ccm.nucleumOmnium.recipeStuff.ShapedOreRecipeType#getNBTFromRecipe -- consider
 * extracting the shared serialization logic.
 *
 * NOTE(review): original line breaks were lost during extraction; the inline "//"
 * comments swallow the code following them on the same physical line, so this text
 * must be re-flowed before it compiles.
 */
@Override public NBTTagCompound getNBTFromRecipe(ShapedOreRecipe recipe, ItemStack newOutput) throws IllegalAccessException { NBTTagCompound nbtRecipe = new NBTTagCompound(); NBTTagList NBTInput = new NBTTagList(); int width = ShapedOreRecipe_width.getInt(recipe); int height = ShapedOreRecipe_height.getInt(recipe); /**//from w w w. ja va2 s . c o m * Build a map to convert the object array into recipe format. */ HashBiMap<Character, Object> map = HashBiMap.create(); HashMap<ArrayList, Object> arrayListMap = new HashMap<ArrayList, Object>(); // Lookup map for oredict entries. for (Object o : recipe.getInput()) { if (o == null) continue; if (map.containsValue(o)) continue; if (o instanceof ArrayList) { for (String name : OreDictionary.getOreNames()) { if (OreDictionary.getOres(name).equals(o)) { if (map.containsValue(name)) break; map.put(DUMMY_CHARS.charAt(map.size()), name); arrayListMap.put((ArrayList) o, name); break; } } } else { map.put(DUMMY_CHARS.charAt(map.size()), o); } } /** * Make the recipe strings * aka: "aa ", "aa ", "aa " */ char[][] chars = new char[height][width]; for (int h = 0; h < height; h++) { for (int w = 0; w < width; w++) { int i = h * width + w; if (recipe.getInput()[i] == null) chars[h][w] = ' '; else if (recipe.getInput()[i] instanceof ArrayList) chars[h][w] = map.inverse().get(arrayListMap.get(recipe.getInput()[i])); else chars[h][w] = map.inverse().get(recipe.getInput()[i]); } String line = new String(chars[h]); NBTInput.appendTag(new NBTTagString(null, line)); } nbtRecipe.setTag(NBT_input, NBTInput); /** * Add the char to itemstack thing * aka: 'a' = "plank" */ NBTTagCompound nbtMap = new NBTTagCompound(); for (Map.Entry<Character, Object> entry : map.entrySet()) { if (entry.getValue() instanceof String) nbtMap.setString(entry.getKey().toString(), entry.getValue().toString()); else if (entry.getValue() instanceof ItemStack) nbtMap.setCompoundTag(entry.getKey().toString(), ((ItemStack) entry.getValue()).writeToNBT(new 
NBTTagCompound())); else { CrayCrafting.logger .severe("NBT RECIPE ERROR: " + entry.getValue() + " IS NOT STRING OR ITEMSTACK ???"); } } nbtRecipe.setCompoundTag(NBT_map, nbtMap); nbtRecipe.setCompoundTag(NBT_output, newOutput.writeToNBT(new NBTTagCompound())); nbtRecipe.setBoolean(NBT_mirror, ShapedOreRecipe_mirror.getBoolean(recipe)); return nbtRecipe; }
From source file:com.facebook.buck.tools.consistency.TargetHashFileParser.java
/** * Parses the output of `buck targets --show-target-hash` from a filename * * @param filename The file to parse/* w w w . j a va 2 s .c o m*/ * @return A parsed targets file * @throws ParseException If the file could not be read, is malformed, or has duplicate * information within it */ public ParsedTargetsFile parseFile(Path filename) throws ParseException { long startNanos = System.nanoTime(); try (BufferedReader fileReader = Files.newBufferedReader(filename)) { int expectedSize = Math.toIntExact(Files.size(filename) / 150); HashBiMap<String, String> targetsToHash = HashBiMap.create(expectedSize); while (fileReader.ready()) { String line = fileReader.readLine(); String[] parts = line.split(" "); if (parts.length != 2) { throw new ParseException(filename, "Lines must be of the format 'TARGET HASH'. Got %s", line); } if (targetsToHash.containsKey(parts[0])) { throw new ParseException(filename, "Target %s has been seen multiple times", parts[0]); } if (targetsToHash.containsValue(parts[1])) { throw new ParseException(filename, "Hash collision! Hash %s has been seen for both %s and %s!", parts[1], targetsToHash.inverse().get(parts[1]), parts[0]); } targetsToHash.put(parts[0], parts[1]); } Duration runtime = Duration.ofNanos(System.nanoTime() - startNanos); return new ParsedTargetsFile(filename, targetsToHash, runtime); } catch (IOException e) { throw new ParseException(e, filename, "Error reading file: %s", e.getMessage()); } }
From source file:org.opencb.opencga.storage.mongodb.variant.converters.DocumentToSamplesConverter.java
/**
 * Converts the per-sample data of a StudyEntry into the Mongo storage layout.
 * Samples belonging to the given file are grouped by genotype (the "GT" format
 * field; UNKNOWN_GENOTYPE when the field or its value is missing); genotypes that
 * are not in the study's default-genotype set are written as sample-id lists under
 * GENOTYPES_FIELD (skipped entirely when EXCLUDE_GENOTYPES is set). Every
 * configured EXTRA_GENOTYPE_FIELDS entry present in the format is packed
 * per-sample into a VariantMongoDBProto.OtherFields blob (as Integer/Float/String
 * per EXTRA_GENOTYPE_FIELDS_TYPE), compressed via CompressionUtils.compress when
 * it exceeds 50 bytes and EXTRA_GENOTYPE_FIELDS_COMPRESS is enabled, and appended
 * to {@code otherFields} under the lower-cased field name.
 *
 * NOTE(review): samplesPosition is populated but its only consumer is commented
 * out -- it appears to be dead code; confirm before removing.
 *
 * NOTE(review): original line breaks were lost during extraction; inline "//"
 * comments swallow the code following them on the same physical line, so this
 * text must be re-flowed before it compiles.
 */
public Document convertToStorageType(StudyEntry studyEntry, int studyId, int fileId, Document otherFields, Set<String> samplesInFile) { Map<String, List<Integer>> genotypeCodes = new HashMap<>(); final StudyConfiguration studyConfiguration = studyConfigurations.get(studyId); boolean excludeGenotypes = studyConfiguration.getAttributes().getBoolean(Options.EXCLUDE_GENOTYPES.key(), Options.EXCLUDE_GENOTYPES.defaultValue()); boolean compressExtraParams = studyConfiguration.getAttributes().getBoolean( Options.EXTRA_GENOTYPE_FIELDS_COMPRESS.key(), Options.EXTRA_GENOTYPE_FIELDS_COMPRESS.defaultValue()); Set<String> defaultGenotype = studyDefaultGenotypeSet.get(studyId).stream().collect(Collectors.toSet()); HashBiMap<String, Integer> sampleIds = HashBiMap.create(studyConfiguration.getSampleIds()); // Classify samples by genotype int sampleIdx = 0; Integer gtIdx = studyEntry.getFormatPositions().get("GT"); List<String> studyEntryOrderedSamplesName = studyEntry.getOrderedSamplesName(); for (List<String> data : studyEntry.getSamplesData()) { String sampleName = studyEntryOrderedSamplesName.get(sampleIdx); sampleIdx++;/* www . ja va 2s .c o m*/ if (!samplesInFile.contains(sampleName)) { continue; } String genotype; if (gtIdx == null) { genotype = UNKNOWN_GENOTYPE; } else { genotype = data.get(gtIdx); } if (genotype == null) { genotype = UNKNOWN_GENOTYPE; } // Genotype g = new Genotype(genotype); List<Integer> samplesWithGenotype = genotypeCodes.get(genotype); if (samplesWithGenotype == null) { samplesWithGenotype = new ArrayList<>(); genotypeCodes.put(genotype, samplesWithGenotype); } samplesWithGenotype.add(sampleIds.get(sampleName)); } // In Mongo, samples are stored in a map, classified by their genotype. // The most common genotype will be marked as "default" and the specific // positions where it is shown will not be stored. 
Example from 1000G: // "def" : 0|0, // "0|1" : [ 41, 311, 342, 358, 881, 898, 903 ], // "1|0" : [ 262, 290, 300, 331, 343, 369, 374, 391, 879, 918, 930 ] Document mongoSamples = new Document(); Document mongoGenotypes = new Document(); for (Map.Entry<String, List<Integer>> entry : genotypeCodes.entrySet()) { String genotypeStr = genotypeToStorageType(entry.getKey()); if (!defaultGenotype.contains(entry.getKey())) { mongoGenotypes.append(genotypeStr, entry.getValue()); } } if (!excludeGenotypes) { mongoSamples.append(DocumentToStudyVariantEntryConverter.GENOTYPES_FIELD, mongoGenotypes); } //Position for samples in this file HashBiMap<String, Integer> samplesPosition = HashBiMap.create(); int position = 0; for (Integer sampleId : studyConfiguration.getSamplesInFiles().get(fileId)) { samplesPosition.put(studyConfiguration.getSampleIds().inverse().get(sampleId), position++); } List<String> extraFields = studyConfiguration.getAttributes() .getAsStringList(Options.EXTRA_GENOTYPE_FIELDS.key()); List<String> extraFieldsType = studyConfiguration.getAttributes() .getAsStringList(Options.EXTRA_GENOTYPE_FIELDS_TYPE.key()); for (int i = 0; i < extraFields.size(); i++) { String extraField = extraFields.get(i); String extraFieldType = i < extraFieldsType.size() ? 
extraFieldsType.get(i) : "String"; VariantMongoDBProto.OtherFields.Builder builder = VariantMongoDBProto.OtherFields.newBuilder(); // List<Object> values = new ArrayList<>(samplesPosition.size()); // for (int size = samplesPosition.size(); size > 0; size--) { // values.add(UNKNOWN_FIELD); // } sampleIdx = 0; if (studyEntry.getFormatPositions().containsKey(extraField)) { Integer formatIdx = studyEntry.getFormatPositions().get(extraField); for (List<String> sampleData : studyEntry.getSamplesData()) { String sampleName = studyEntryOrderedSamplesName.get(sampleIdx); sampleIdx++; if (!samplesInFile.contains(sampleName)) { continue; } // Integer index = samplesPosition.get(sampleName); String stringValue = sampleData.get(formatIdx); // Object value; // if (NumberUtils.isNumber(stringValue)) { // try { // value = Integer.parseInt(stringValue); // } catch (NumberFormatException e) { // try { // value = Double.parseDouble(stringValue); // } catch (NumberFormatException e2) { // value = stringValue; // } // } // } else { // value = stringValue; // } switch (extraFieldType) { case "Integer": { builder.addIntValues(INTEGER_COMPLEX_TYPE_CONVERTER.convertToStorageType(stringValue)); break; } case "Float": { builder.addFloatValues(FLOAT_COMPLEX_TYPE_CONVERTER.convertToStorageType(stringValue)); break; } case "String": default: builder.addStringValues(stringValue); break; } } byte[] byteArray = builder.build().toByteArray(); if (compressExtraParams) { if (byteArray.length > 50) { try { byteArray = CompressionUtils.compress(byteArray); } catch (IOException e) { throw new UncheckedIOException(e); } } } otherFields.append(extraField.toLowerCase(), byteArray); } // else { Don't set this field } } return mongoSamples; }
From source file:hudson.plugins.project_inheritance.util.svg.renderers.SVGTreeRenderer.java
/**
 * Renders the inheritance graph as SVG primitives: builds one SVGClassBox per node
 * of a left-spanning tree, lays the boxes out in layers (minimal-inbound-edge nodes
 * first, children in subsequent layers), repeatedly re-centres each parent over the
 * union bounds of its children (moving siblings or subtrees as needed), and finally
 * prepends one SVGArrow per edge of the ORIGINAL graph (not just the spanning tree)
 * so arrows are drawn behind the boxes. Returns an empty list for an empty graph.
 * The detailed layout algorithm is described in the long comment at the top of the
 * method body.
 *
 * NOTE(review): original line breaks were lost during extraction; inline "//"
 * comments swallow the code following them on the same physical line, so this
 * text must be re-flowed before it compiles.
 */
@Override public Collection<SVGPrimitive> getElements() { LinkedList<SVGPrimitive> out = new LinkedList<SVGPrimitive>(); if (this.graph.getNumNodes() <= 0) { return out; }/*from w w w .j av a 2s. com*/ /* To generate a suitable tree-like graph; the following is done * * 1.) Generate a forest of left-spanning trees (LST) from the graph. * 2.) Each leaf (none or minimal inbound edges) is a root of a tree * 3.) Order the tree nodes into layers depending on their distance to * their root element. * 4.) Create SVG drawables for each node where the x-coord is * determined by their position in their layer and their y-coord * is determined by the sum of heights of the previous layers. * Leave space between each layer (y-diff) and each node (x-diff). * 5.) Now repeatedly iterate through all nodes and ensure that each * parent's center on the x-asis is over the x-axis middle of all * its children (includes children's children). * * If this is not the case do the following: * * A) If the parent is to the left of the middle; move the parent * to that middle and move all its siblings on the right along * with it * B) If the parent is to the right of the middle; move its * children right by the detected difference. * Apply the same move to the children's children. * * 6.) Add all edges from the original tree as SVGArrows between the * drawn nodes. 
*/ //Fetch a suitable spanning tree Graph<SVGNode> span = this.graph.getSpanningTree(); //Use it to generate the drawables and add them to a mirrored STree Graph<SVGPrimitive> spanDraw = new Graph<SVGPrimitive>(); //A bimap for node/drawable lookup HashBiMap<SVGNode, SVGPrimitive> nodeLookup = HashBiMap.create(); for (SVGNode node : span.getNodes()) { //Create a drawable for that node SVGPrimitive drawable = new SVGClassBox(new Point2D.Double(0, 0), //Filled in later new TextProperty(node.getSVGLabel(), null, STYLE.BOLD, "Consolas", 16, 5), node.getSVGLabelLink(), new TextProperty(node.getSVGDetail(), null, STYLE.PLAIN, "Consolas", 16, 5), new ColorProperty(getColor(span.getEdgesFor(node).size()), this.width, 1.0, null), new Point2D.Double(10, -1), //Restrict min-width to 10px new Point2D.Double(384, -1) //Restrict max-width to 512px ); //Add the primitive to the tree; edges are filled in later spanDraw.addNode(drawable); nodeLookup.put(node, drawable); } //Transfer the egdes for (SVGNode node : span.getNodes()) { SVGPrimitive drawable = nodeLookup.get(node); for (SVGNode edge : span.getEdgesFor(node)) { spanDraw.addEdges(drawable, nodeLookup.get(edge)); } } //Start from the minimum inbound nodes and add their childs in layers LinkedList<SVGPrimitive> open = new LinkedList<SVGPrimitive>(spanDraw.getMinimalInboundEdgeNodes(null)); LinkedList<SVGPrimitive> next = new LinkedList<SVGPrimitive>(); //We need to remember the right-most siblings of each node HashMap<SVGPrimitive, LinkedList<SVGPrimitive>> siblings = new HashMap<SVGPrimitive, LinkedList<SVGPrimitive>>(); double xOffset = marginX; double yOffset = marginY; double maxHeight = 0; while (!open.isEmpty() || !next.isEmpty()) { if (open.isEmpty()) { xOffset = marginX; yOffset += maxHeight + this.deltaY; maxHeight = 0; //Swap open and next lists LinkedList<SVGPrimitive> tmp = open; open = next; next = tmp; } SVGPrimitive node = open.pop(); node.moveTo(new Point2D.Double(xOffset, yOffset)); Rectangle2D.Double bounds 
= node.getBounds(); if (bounds != null) { xOffset += bounds.width + this.deltaX; maxHeight = Math.max(bounds.height, maxHeight); } //Add the list of right-hand siblings to this node siblings.put(node, new LinkedList<SVGPrimitive>(open)); //Add the children of the node to the next layer's todo-list next.addAll(spanDraw.getEdgesFor(node)); } //Center all drawables above their children boolean hasMoved = true; while (hasMoved) { hasMoved = false; for (SVGPrimitive node : spanDraw.getNodes()) { //Fetch the bounds of the current node Rectangle2D.Double nodeBounds = node.getBounds(); if (nodeBounds == null) { //Node is not visible continue; } //Calculate the union bound of all direct children Rectangle2D.Double childBounds = null; for (SVGPrimitive child : spanDraw.getEdgesFor(node)) { Rectangle2D.Double childBound = child.getBounds(); if (childBound == null) { continue; } if (childBounds == null) { childBounds = (Rectangle2D.Double) childBound.clone(); } else { Rectangle2D.Double.union(childBounds, childBound, childBounds); } } if (childBounds == null) { //No children or all of them are non-visible continue; } double xDiff = nodeBounds.getCenterX() - childBounds.getCenterX(); if (Math.abs(xDiff) < 1) { //We don't care about a delta of less than one pixel continue; } Point2D.Double delta = new Point2D.Double(Math.abs(xDiff), 0); if (xDiff > 0) { //We move all children so that they're centered below the node //Do note that we also need to move the siblings of all children HashSet<SVGPrimitive> visitedNodes = new HashSet<SVGPrimitive>(); HashSet<SVGPrimitive> openNodes = new HashSet<SVGPrimitive>(spanDraw.getEdgesFor(node)); while (!openNodes.isEmpty()) { SVGPrimitive subNode = openNodes.iterator().next(); openNodes.remove(subNode); if (visitedNodes.contains(subNode)) { continue; } visitedNodes.add(subNode); subNode.translate(delta); //Adding all children of that subNode openNodes.addAll(spanDraw.getEdgesFor(subNode)); //Adding all siblings of that subNode 
openNodes.addAll(siblings.get(subNode)); } } else { //We move the node and all its siblings to the right node.translate(delta); for (SVGPrimitive sibling : siblings.get(node)) { sibling.translate(delta); } } hasMoved = true; } } //Add all generated boxes to the out-list out.addAll(spanDraw.getNodes()); //Add ALL edges from the original graph as arrows; not just from the STree //They are prepended to be BEHIND the boxes for (SVGNode node : this.graph.getNodes()) { SVGPrimitive dNode = nodeLookup.get(node); for (SVGNode edge : this.graph.getEdgesFor(node)) { SVGPrimitive dEdge = nodeLookup.get(edge); SVGArrow arrow = SVGArrow.createConnection(dEdge, dNode, new ColorProperty(Color.BLACK, 2.0, 1.0, null), new ArrowProperty(new ColorProperty(Color.BLACK, 2.0, 1.0, null), true, 12, 35)); out.addFirst(arrow); } } return out; }
From source file:com.haulmont.yarg.formatters.impl.XlsxFormatter.java
/**
 * XLSX documents do not persist empty cells, which breaks formula calculation and
 * similar processing, so this fills in a fake template cell for every reference in
 * the row range that has no backing cell, then re-sorts the template cells by
 * their cell reference.
 *
 * @param oneRowRange          range describing one template row
 * @param cellsForOneRowRange  existing cells of that row, keyed by reference
 * @param templateCells        mutable list of template cells; fakes are appended
 *                             and the whole list is sorted in place
 */
protected void createFakeTemplateCellsForEmptyOnes(Range oneRowRange,
        Map<CellReference, Cell> cellsForOneRowRange, List<Cell> templateCells) {
    if (oneRowRange.toCellReferences().size() == templateCells.size()) {
        // Every referenced cell already exists -- nothing to fake, order untouched.
        return;
    }
    final HashBiMap<CellReference, Cell> cellsByReference = HashBiMap.create(cellsForOneRowRange);
    for (CellReference reference : oneRowRange.toCellReferences()) {
        if (cellsForOneRowRange.containsKey(reference)) {
            continue;
        }
        // Empty placeholder cell carrying only its reference.
        Cell stub = Context.getsmlObjectFactory().createCell();
        stub.setV(null);
        stub.setT(STCellType.STR);
        stub.setR(reference.toReference());
        templateCells.add(stub);
        cellsByReference.put(reference, stub);
    }
    // Restore reference order now that fakes were appended at the end.
    Collections.sort(templateCells, new Comparator<Cell>() {
        @Override
        public int compare(Cell left, Cell right) {
            CellReference leftRef = cellsByReference.inverse().get(left);
            CellReference rightRef = cellsByReference.inverse().get(right);
            return leftRef.compareTo(rightRef);
        }
    });
}