List of usage examples for com.google.common.collect.ImmutableList#indexOf
@Override public int indexOf(@Nullable Object object)
From source file:com.facebook.buck.cxx.Depfiles.java
/**
 * Parses a compiler-emitted Makefile-style dependency file and rewrites it to Buck's own depfile
 * format, normalizing the header paths it contains.
 *
 * @param context execution context used for event posting (untracked-header diagnostics)
 * @param filesystem project filesystem used to open the source and destination depfiles
 * @param headerPathNormalizer maps unnormalized header paths to tracked absolute paths
 * @param headerVerification policy for headers not known to the normalizer (IGNORE/WARN/ERROR)
 * @param sourceDepFile path of the raw depfile produced by the compiler
 * @param destDepFile path where the rewritten depfile is written
 * @param inputPath the compiled source file, as it appears among the depfile prerequisites
 * @param outputPath the compilation output path (recorded in the perf-event params only)
 * @return 0 on success; 1 if an untracked header was found and the verification mode is ERROR
 * @throws IOException if reading the source depfile or writing the destination depfile fails
 */
public static int parseAndWriteBuckCompatibleDepfile(ExecutionContext context, ProjectFilesystem filesystem,
        HeaderPathNormalizer headerPathNormalizer, HeaderVerification headerVerification, Path sourceDepFile,
        Path destDepFile, Path inputPath, Path outputPath) throws IOException {
    // Process the dependency file, fixing up the paths, and write it out to its final location.
    // The paths of the headers written out to the depfile are the paths to the symlinks from the
    // root of the repo if the compilation included them from the header search paths pointing to
    // the symlink trees, or paths to headers relative to the source file if the compilation
    // included them using source relative include paths. To handle both cases we check for the
    // prerequisites both in the values and the keys of the replacement map.
    Logger.get(Depfiles.class).debug("Processing dependency file %s as Makefile", sourceDepFile);
    ImmutableMap<String, Object> params = ImmutableMap.of("input", inputPath, "output", outputPath);
    try (InputStream input = filesystem.newFileInputStream(sourceDepFile);
            BufferedReader reader = new BufferedReader(new InputStreamReader(input));
            OutputStream output = filesystem.newFileOutputStream(destDepFile);
            BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(output));
            SimplePerfEvent.Scope perfEvent = SimplePerfEvent.scope(context.getBuckEventBus(),
                    PerfEventId.of("depfile-parse"), params)) {
        ImmutableList<String> prereqs = Depfiles.parseDepfile(reader).getPrereqs();
        // Additional files passed in via command-line flags (e.g. `-fsanitize-blacklist=<file>`)
        // appear first in the dep file, followed by the input source file. So, just skip over
        // everything until just after the input source which should position us at the headers.
        //
        // TODO(#11303454): This means we're not including the content of these special files into
        // the rule key. The correct way to handle this is likely to support macros in preprocessor/
        // compiler flags at which point we can use the entries for these files in the depfile to
        // verify that the user properly references these files via the macros.
        int inputIndex = prereqs.indexOf(inputPath.toString());
        Preconditions.checkState(inputIndex != -1, "Could not find input source (%s) in dep file prereqs (%s)",
                inputPath, prereqs);
        // Everything after the input source is a header prerequisite.
        Iterable<String> headers = Iterables.skip(prereqs, inputIndex + 1);
        for (String rawHeader : headers) {
            Path header = Paths.get(rawHeader).normalize();
            Optional<Path> absolutePath = headerPathNormalizer.getAbsolutePathForUnnormalizedPath(header);
            if (absolutePath.isPresent()) {
                // Tracked header: emit its normalized absolute path, one per line.
                Preconditions.checkState(absolutePath.get().isAbsolute());
                writer.write(absolutePath.get().toString());
                writer.newLine();
            } else if (headerVerification.getMode() != HeaderVerification.Mode.IGNORE
                    && !headerVerification.isWhitelisted(header.toString())) {
                // Untracked, non-whitelisted header: report it; fail the build only in ERROR mode.
                context.getBuckEventBus()
                        .post(ConsoleEvent.create(
                                headerVerification.getMode() == HeaderVerification.Mode.ERROR ? Level.SEVERE
                                        : Level.WARNING,
                                "%s: included an untracked header \"%s\"", inputPath, header));
                if (headerVerification.getMode() == HeaderVerification.Mode.ERROR) {
                    return 1;
                }
            }
        }
    }
    return 0;
}
From source file:com.google.devtools.build.skyframe.CycleDeduper.java
/** * Marks a non-empty list representing a cycle of unique values as being seen and returns true * iff the cycle hasn't been seen before, accounting for logical equivalence of cycles. * * For example, the cycle 'a' -> 'b' -> 'c' -> 'a' is represented by the list ['a', 'b', 'c'] * and is logically equivalent to the cycle represented by the list ['b', 'c', 'a']. *///from ww w .ja v a 2s . c o m public boolean seen(ImmutableList<T> cycle) { ImmutableSet<T> cycleMembers = ImmutableSet.copyOf(cycle); Preconditions.checkState(!cycle.isEmpty()); Preconditions.checkState(cycle.size() == cycleMembers.size(), "cycle doesn't have unique members: " + cycle); if (knownCyclesByMembers.containsEntry(cycleMembers, cycle)) { return false; } // Of the C cycles, suppose there are D cycles that have the same members (but are in an // incompatible order). This code path takes O(D * L) time. The common case is that D is // very small. boolean found = false; for (ImmutableList<T> candidateCycle : knownCyclesByMembers.get(cycleMembers)) { int startPos = candidateCycle.indexOf(cycle.get(0)); // The use of a multimap keyed by cycle members guarantees that the first element of 'cycle' // is present in 'candidateCycle'. Preconditions.checkState(startPos >= 0); if (equalsWithSingleLoopFrom(cycle, candidateCycle, startPos)) { found = true; break; } } // We add the cycle even if it's a duplicate so that future exact copies of this can be // processed in O(L) time. We are already using O(CL) memory, and this optimization doesn't // change that. knownCyclesByMembers.put(cycleMembers, cycle); return !found; }
From source file:no.ssb.vtl.script.operations.join.JoinKeyExtractor.java
/**
 * Builds the index mapping used to extract join-key values from a child dataset.
 *
 * @param childStructure the structure of the child dataset
 * @param order the join order; its key set defines the key components and their positions
 * @param mapper maps an order component to the corresponding component in the child structure
 */
public JoinKeyExtractor(DataStructure childStructure, Order order, Function<Component, Component> mapper) {
    ImmutableList<Component> childComponents = ImmutableList.copyOf(childStructure.values());
    ImmutableList<Component> orderComponents = ImmutableList.copyOf(order.keySet());
    ArrayList<Integer> positions = Lists.newArrayList();
    // For each component of the order, store (at that component's position within the order)
    // the index of the matching component inside the child structure.
    for (Component component : order.keySet()) {
        positions.add(orderComponents.indexOf(component), childComponents.indexOf(mapper.apply(component)));
    }
    this.indices = Ints.toArray(positions);
    this.buffer = DataPoint.create(order.size());
}
From source file:org.locationtech.geogig.plumbing.diff.Patch.java
private String featureTypeDiffAsString(FeatureTypeDiff diff) { StringBuilder sb = new StringBuilder(); sb.append(diff.toString() + "\n"); if (!diff.getNewFeatureType().equals(ObjectId.NULL) && !diff.getOldFeatureType().equals(ObjectId.NULL)) { RevFeatureType oldFeatureType = getFeatureTypeFromId(diff.getOldFeatureType()).get(); RevFeatureType newFeatureType = getFeatureTypeFromId(diff.getNewFeatureType()).get(); ImmutableList<PropertyDescriptor> oldDescriptors = oldFeatureType.descriptors(); ImmutableList<PropertyDescriptor> newDescriptors = newFeatureType.descriptors(); BitSet updatedDescriptors = new BitSet(newDescriptors.size()); for (int i = 0; i < oldDescriptors.size(); i++) { PropertyDescriptor oldDescriptor = oldDescriptors.get(i); int idx = newDescriptors.indexOf(oldDescriptor); if (idx != -1) { updatedDescriptors.set(idx); } else { Class<?> oldType = oldDescriptor.getType().getBinding(); sb.append(//from w ww . ja v a2 s.c o m "R\t" + oldDescriptors.get(i).getName().getLocalPart() + "[" + oldType.getName() + "]"); } } updatedDescriptors.flip(0, updatedDescriptors.length()); for (int i = updatedDescriptors.nextSetBit(0); i >= 0; i = updatedDescriptors.nextSetBit(i + 1)) { PropertyDescriptor newDescriptor = newDescriptors.get(i); Class<?> oldType = newDescriptor.getType().getBinding(); sb.append("A\t" + newDescriptors.get(i).getName().getLocalPart() + "[" + oldType.getName() + "]"); } } return sb.toString(); }
From source file:org.geogit.api.plumbing.diff.Patch.java
/**
 * Renders a {@link FeatureTypeDiff} as text: the diff header followed by one entry per removed
 * descriptor ("R\t<name>[<type>]") and one per added descriptor ("A\t<name>[<type>]").
 * Descriptor-level details are only produced when both feature-type ids are non-null.
 *
 * @param diff the feature type diff to describe
 * @return the textual description of the diff
 */
private String featureTypeDiffAsString(FeatureTypeDiff diff) {
    StringBuilder sb = new StringBuilder();
    sb.append(diff.toString() + "\n");
    if (!diff.getNewFeatureType().equals(ObjectId.NULL) && !diff.getOldFeatureType().equals(ObjectId.NULL)) {
        RevFeatureType oldFeatureType = getFeatureTypeFromId(diff.getOldFeatureType()).get();
        RevFeatureType newFeatureType = getFeatureTypeFromId(diff.getNewFeatureType()).get();
        ImmutableList<PropertyDescriptor> oldDescriptors = oldFeatureType.sortedDescriptors();
        ImmutableList<PropertyDescriptor> newDescriptors = newFeatureType.sortedDescriptors();
        // Marks the positions in newDescriptors that also exist in the old type.
        BitSet updatedDescriptors = new BitSet(newDescriptors.size());
        for (int i = 0; i < oldDescriptors.size(); i++) {
            PropertyDescriptor oldDescriptor = oldDescriptors.get(i);
            int idx = newDescriptors.indexOf(oldDescriptor);
            if (idx != -1) {
                updatedDescriptors.set(idx);
            } else {
                // Present in the old type only: removed descriptor.
                Class<?> oldType = oldDescriptor.getType().getBinding();
                sb.append("R\t" + oldDescriptors.get(i).getName().getLocalPart() + "[" + oldType.getName() + "]");
            }
        }
        // NOTE(review): BitSet.length() is (highest set bit + 1), so this flip misses added
        // descriptors positioned after the last matched one (and all of them when nothing
        // matched). newDescriptors.size() looks intended — FeatureDiff uses the full size in the
        // same pattern. TODO confirm against upstream before changing.
        updatedDescriptors.flip(0, updatedDescriptors.length());
        for (int i = updatedDescriptors.nextSetBit(0); i >= 0; i = updatedDescriptors.nextSetBit(i + 1)) {
            // Present in the new type only: added descriptor.
            PropertyDescriptor newDescriptor = newDescriptors.get(i);
            Class<?> oldType = newDescriptor.getType().getBinding();
            sb.append("A\t" + newDescriptors.get(i).getName().getLocalPart() + "[" + oldType.getName() + "]");
        }
    }
    return sb.toString();
}
From source file:org.geogit.api.plumbing.diff.FeatureDiff.java
/** * /*from w w w . ja v a 2 s.c o m*/ * @param path the full path to the feature, including its name * @param newRevFeature the new version of the feature * @param oldRevFeature the old version of the feature * @param newRevFeatureType the new version of the feature type * @param oldRevFeatureType the old version of the feature type * @param all - true if all attributes should be added regardless of change */ public FeatureDiff(String path, RevFeature newRevFeature, RevFeature oldRevFeature, RevFeatureType newRevFeatureType, RevFeatureType oldRevFeatureType, boolean all) { this.path = path; this.newFeatureType = newRevFeatureType; this.oldFeatureType = oldRevFeatureType; diffs = new HashMap<PropertyDescriptor, AttributeDiff>(); ImmutableList<PropertyDescriptor> oldAttributes = oldRevFeatureType.sortedDescriptors(); ImmutableList<PropertyDescriptor> newAttributes = newRevFeatureType.sortedDescriptors(); ImmutableList<Optional<Object>> oldValues = oldRevFeature.getValues(); ImmutableList<Optional<Object>> newValues = newRevFeature.getValues(); BitSet updatedAttributes = new BitSet(newValues.size()); for (int i = 0; i < oldAttributes.size(); i++) { Optional<Object> oldValue = oldValues.get(i); int idx = newAttributes.indexOf(oldAttributes.get(i)); if (idx != -1) { Optional<Object> newValue = newValues.get(idx); if (!oldValue.equals(newValue) || all) { if (Geometry.class.isAssignableFrom(oldAttributes.get(i).getType().getBinding())) { diffs.put(oldAttributes.get(i), new GeometryAttributeDiff(Optional.fromNullable((Geometry) oldValue.orNull()), Optional.fromNullable((Geometry) newValue.orNull()))); } else { diffs.put(oldAttributes.get(i), new GenericAttributeDiffImpl(oldValue, newValue)); } } updatedAttributes.set(idx); } else { if (Geometry.class.isAssignableFrom(oldAttributes.get(i).getType().getBinding())) { diffs.put(oldAttributes.get(i), new GeometryAttributeDiff(Optional.fromNullable((Geometry) oldValue.orNull()), Optional.fromNullable((Geometry) null))); } else 
{ diffs.put(oldAttributes.get(i), new GenericAttributeDiffImpl(oldValue, null)); } } } updatedAttributes.flip(0, newValues.size()); for (int i = updatedAttributes.nextSetBit(0); i >= 0; i = updatedAttributes.nextSetBit(i + 1)) { if (Geometry.class.isAssignableFrom(newAttributes.get(i).getType().getBinding())) { diffs.put(oldAttributes.get(i), new GeometryAttributeDiff(Optional.fromNullable((Geometry) null), Optional.fromNullable((Geometry) newValues.get(i).orNull()))); } else { diffs.put(newAttributes.get(i), new GenericAttributeDiffImpl(null, newValues.get(i))); } } }
From source file:org.locationtech.geogig.api.plumbing.diff.FeatureDiff.java
/**
 * Computes the per-attribute differences between two versions of a feature, keyed by property
 * descriptor: modified attributes (when changed, or always if {@code all}), removed attributes
 * (old value -> null) and added attributes (null -> new value).
 *
 * @param path the full path to the feature, including its name
 * @param newRevFeature the new version of the feature
 * @param oldRevFeature the old version of the feature
 * @param newRevFeatureType the new version of the feature type
 * @param oldRevFeatureType the old version of the feature type
 * @param all true if all attributes should be added regardless of change
 */
public FeatureDiff(String path, RevFeature newRevFeature, RevFeature oldRevFeature,
        RevFeatureType newRevFeatureType, RevFeatureType oldRevFeatureType, boolean all) {
    this.path = path;
    this.newFeatureType = newRevFeatureType;
    this.oldFeatureType = oldRevFeatureType;
    diffs = new HashMap<PropertyDescriptor, AttributeDiff>();
    ImmutableList<PropertyDescriptor> oldAttributes = oldRevFeatureType.sortedDescriptors();
    ImmutableList<PropertyDescriptor> newAttributes = newRevFeatureType.sortedDescriptors();
    ImmutableList<Optional<Object>> oldValues = oldRevFeature.getValues();
    ImmutableList<Optional<Object>> newValues = newRevFeature.getValues();
    // Tracks which positions of the new feature were matched by an old attribute.
    BitSet updatedAttributes = new BitSet(newValues.size());
    for (int i = 0; i < oldAttributes.size(); i++) {
        Optional<Object> oldValue = oldValues.get(i);
        int idx = newAttributes.indexOf(oldAttributes.get(i));
        if (idx != -1) {
            // Attribute exists in both versions: record a diff when modified (or always if 'all').
            Optional<Object> newValue = newValues.get(idx);
            if (!oldValue.equals(newValue) || all) {
                if (Geometry.class.isAssignableFrom(oldAttributes.get(i).getType().getBinding())) {
                    diffs.put(oldAttributes.get(i),
                            new GeometryAttributeDiff(Optional.fromNullable((Geometry) oldValue.orNull()),
                                    Optional.fromNullable((Geometry) newValue.orNull())));
                } else {
                    diffs.put(oldAttributes.get(i), new GenericAttributeDiffImpl(oldValue, newValue));
                }
            }
            updatedAttributes.set(idx);
        } else {
            // Attribute was removed in the new version: old value -> null.
            if (Geometry.class.isAssignableFrom(oldAttributes.get(i).getType().getBinding())) {
                diffs.put(oldAttributes.get(i),
                        new GeometryAttributeDiff(Optional.fromNullable((Geometry) oldValue.orNull()),
                                Optional.fromNullable((Geometry) null)));
            } else {
                diffs.put(oldAttributes.get(i), new GenericAttributeDiffImpl(oldValue, null));
            }
        }
    }
    // Remaining unmatched positions are attributes added by the new version: null -> new value.
    updatedAttributes.flip(0, newValues.size());
    for (int i = updatedAttributes.nextSetBit(0); i >= 0; i = updatedAttributes.nextSetBit(i + 1)) {
        if (Geometry.class.isAssignableFrom(newAttributes.get(i).getType().getBinding())) {
            diffs.put(newAttributes.get(i),
                    new GeometryAttributeDiff(Optional.fromNullable((Geometry) null),
                            Optional.fromNullable((Geometry) newValues.get(i).orNull())));
        } else {
            diffs.put(newAttributes.get(i), new GenericAttributeDiffImpl(null, newValues.get(i)));
        }
    }
}
From source file:no.ssb.vtl.model.DatapointNormalizer.java
/**
 * Computes the index mapping needed to reorder data points from one structure's column layout to
 * another's, restricted to the columns accepted by {@code predicate}.
 *
 * @param from the source data structure (must be non-null and non-empty)
 * @param to the target data structure (must be non-null)
 * @param predicate selects which column names participate in the mapping
 */
public DatapointNormalizer(DataStructure from, DataStructure to, Predicate<String> predicate) {
    checkNotNull(from);
    checkArgument(!from.isEmpty());
    checkNotNull(to);
    ImmutableList<String> sourceOrder = ImmutableSet.copyOf(from.keySet()).asList();
    ImmutableList<String> targetOrder = ImmutableSet.copyOf(to.keySet()).asList();
    // Make sure that both structures contain the columns we are mapping.
    Set<String> selectedFrom = Sets.filter(from.keySet(), predicate::test);
    Set<String> selectedTo = Sets.filter(to.keySet(), predicate::test);
    checkArgument(selectedFrom.containsAll(selectedTo));
    // Build the index pairs, recording only columns whose position actually differs.
    ArrayList<Integer> sourcePositions = Lists.newArrayList();
    ArrayList<Integer> targetPositions = Lists.newArrayList();
    for (String name : selectedFrom) {
        int sourcePosition = sourceOrder.indexOf(name);
        int targetPosition = targetOrder.indexOf(name);
        if (sourcePosition != targetPosition) {
            sourcePositions.add(sourcePosition);
            targetPositions.add(targetPosition);
        }
    }
    this.fromIndices = Ints.toArray(sourcePositions);
    this.toIndices = Ints.toArray(targetPositions);
}
From source file:fr.inria.linuxtools.ctf.core.event.EventDefinition.java
/** * Gets the context of this event within a stream * * @return the context in struct form/*from w ww.j a v a2 s.c o m*/ */ public StructDefinition getContext() { /* Most common case so far */ if (fStreamContext == null) { return fEventContext; } /* streamContext is not null, but the context of the event is null */ if (fEventContext == null) { return fStreamContext; } // TODO: cache if this is a performance issue /* The stream context and event context are assigned. */ StructDeclaration mergedDeclaration = new StructDeclaration(1); Builder<String> builder = ImmutableList.<String>builder(); List<Definition> fieldValues = new ArrayList<>(); /* Add fields from the stream */ for (String fieldName : fStreamContext.getFieldNames()) { Definition definition = fStreamContext.getDefinition(fieldName); mergedDeclaration.addField(fieldName, definition.getDeclaration()); builder.add(fieldName); fieldValues.add(definition); } ImmutableList<String> fieldNames = builder.build(); /* * Add fields from the event context, overwrite the stream ones if * needed. */ for (String fieldName : fEventContext.getFieldNames()) { Definition definition = fEventContext.getDefinition(fieldName); mergedDeclaration.addField(fieldName, definition.getDeclaration()); if (fieldNames.contains(fieldName)) { fieldValues.set((fieldNames.indexOf(fieldName)), definition); } else { builder.add(fieldName); fieldValues.add(definition); } } fieldNames = builder.build(); StructDefinition mergedContext = new StructDefinition(mergedDeclaration, this, "context", //$NON-NLS-1$ fieldNames, fieldValues.toArray(new Definition[fieldValues.size()])); return mergedContext; }
From source file:org.geogit.geotools.plumbing.ImportOp.java
/**
 * Translates a feature pointed to by a node from its original feature type to a given one, using
 * values from those attributes that exist in both the original and destination feature types.
 * Attributes only present in the destination type are populated with absent values.
 *
 * @param node the node that points to the feature; no checking is performed to ensure the node
 *        points to a feature instead of another type
 * @param featureType the destination feature type
 * @return a feature with the passed feature type and data taken from the input feature
 */
private Feature alter(NodeRef node, RevFeatureType featureType) {
    RevFeature oldFeature = command(RevObjectParse.class).setObjectId(node.objectId()).call(RevFeature.class)
            .get();
    RevFeatureType oldFeatureType = command(RevObjectParse.class).setObjectId(node.getMetadataId())
            .call(RevFeatureType.class).get();
    ImmutableList<PropertyDescriptor> sourceAttributes = oldFeatureType.sortedDescriptors();
    ImmutableList<PropertyDescriptor> targetAttributes = featureType.sortedDescriptors();
    ImmutableList<Optional<Object>> sourceValues = oldFeature.getValues();
    List<Optional<Object>> targetValues = Lists.newArrayList();
    // Copy values for attributes shared with the source type; null-fill the rest.
    for (PropertyDescriptor targetAttribute : targetAttributes) {
        int sourceIndex = sourceAttributes.indexOf(targetAttribute);
        if (sourceIndex != -1) {
            targetValues.add(sourceValues.get(sourceIndex));
        } else {
            targetValues.add(Optional.absent());
        }
    }
    RevFeature newFeature = RevFeature.build(ImmutableList.copyOf(targetValues));
    return new FeatureBuilder(featureType).build(node.name(), newFeature);
}