List of usage examples for com.google.common.collect.ImmutableList#get
E get(int index);
Returns the element at the specified (zero-based) position in the list. As with any java.util.List, an index that is negative or not less than size() results in an IndexOutOfBoundsException.
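Before the project examples below, a minimal self-contained sketch of the call (the class name, variable names, and list contents here are illustrative, not taken from any of the sources):

import com.google.common.collect.ImmutableList;

public class ImmutableListGetExample {
    public static void main(String[] args) {
        // Build an immutable list; its contents can never change after creation.
        ImmutableList<String> colors = ImmutableList.of("red", "green", "blue");

        // get(int) returns the element stored at the given zero-based index.
        String first = colors.get(0);                  // "red"
        String last = colors.get(colors.size() - 1);   // "blue"
        System.out.println(first + ", " + last);

        // Indices outside [0, size()) throw IndexOutOfBoundsException, as for any List.
        try {
            colors.get(colors.size());
        } catch (IndexOutOfBoundsException e) {
            System.out.println("index " + colors.size() + " is out of bounds");
        }
    }
}

The project examples that follow show the same call in context: indexed loops that pair parallel lists, round-robin partitioning by index, and positional lookups into descriptor lists.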
From source file:msi.gaml.factories.ModelAssembler.java
public ModelDescription assemble(final String projectPath, final String modelPath,
        final Iterable<ISyntacticElement> allModels, final ValidationContext collector, final boolean document,
        final Map<String, ModelDescription> mm) {
    final ImmutableList<ISyntacticElement> models = ImmutableList.copyOf(allModels);
    final TOrderedHashMap<String, ISyntacticElement> speciesNodes = new TOrderedHashMap();
    final TOrderedHashMap<String, TOrderedHashMap<String, ISyntacticElement>>[] experimentNodes = new TOrderedHashMap[1];
    final ISyntacticElement globalNodes = SyntacticFactory.create(GLOBAL, (EObject) null, true);
    final ISyntacticElement source = models.get(0);
    Facets globalFacets = null;
    if (source.hasFacet(IKeyword.PRAGMA)) {
        final Facets facets = source.copyFacets(null);
        final List<String> pragmas = (List<String>) facets.get(IKeyword.PRAGMA).getExpression().getConstValue();
        collector.resetInfoAndWarning();
        if (pragmas != null) {
            if (pragmas.contains(IKeyword.NO_INFO)) {
                collector.setNoInfo();
            }
            if (pragmas.contains(IKeyword.NO_WARNING)) {
                collector.setNoWarning();
            }
            if (pragmas.contains(IKeyword.NO_EXPERIMENT)) {
                collector.setNoExperiment();
            }
        }
    }
    final Map<String, SpeciesDescription> tempSpeciesCache = new THashMap<>();
    for (final ISyntacticElement cm : models.reverse()) {
        final SyntacticModelElement currentModel = (SyntacticModelElement) cm;
        if (currentModel != null) {
            if (currentModel.hasFacets()) {
                if (globalFacets == null) {
                    globalFacets = new Facets(currentModel.copyFacets(null));
                } else {
                    globalFacets.putAll(currentModel.copyFacets(null));
                }
            }
            currentModel.visitChildren(element -> globalNodes.addChild(element));
            SyntacticVisitor visitor = element -> addSpeciesNode(element, speciesNodes, collector);
            currentModel.visitSpecies(visitor);
            // We input the species so that grids are always the last ones
            // (see DiffusionStatement)
            currentModel.visitGrids(visitor);
            visitor = element -> {
                if (experimentNodes[0] == null) {
                    experimentNodes[0] = new TOrderedHashMap();
                }
                addExperimentNode(element, currentModel.getName(), experimentNodes[0], collector);
            };
            currentModel.visitExperiments(visitor);
        }
    }
    final String modelName = buildModelName(source.getName());
    // We build a list of working paths from which the composite model will be able to look for
    // resources. These working paths come from the imported models.
    Set<String> absoluteAlternatePathAsStrings = models.isEmpty() ? null
            : ImmutableSet.copyOf(
                    Iterables.transform(models.reverse(), each -> ((SyntacticModelElement) each).getPath()));
    if (mm != null) {
        for (final ModelDescription m1 : mm.values()) {
            for (final String im : m1.getAlternatePaths()) {
                absoluteAlternatePathAsStrings = Sets.union(absoluteAlternatePathAsStrings,
                        Collections.singleton(im));
            }
        }
    }
    final ModelDescription model = new ModelDescription(modelName, null, projectPath, modelPath,
            source.getElement(), null, ModelDescription.ROOT, null, globalFacets, collector,
            absoluteAlternatePathAsStrings);
    final Collection<String> allModelNames = models.size() == 1 ? null
            : ImmutableSet.copyOf(
                    Iterables.transform(Iterables.skip(models, 1), each -> buildModelName(each.getName())));
    model.setImportedModelNames(allModelNames);
    model.isDocumenting(document);
    // hqnghi add micro-models
    if (mm != null) {
        // model.setMicroModels(mm);
        model.addChildren(mm.values());
    }
    // end-hqnghi
    // recursively add user-defined species to world and down on to the hierarchy
    speciesNodes.forEachValue(speciesNode -> {
        addMicroSpecies(model, speciesNode, tempSpeciesCache);
        return true;
    });
    if (experimentNodes[0] != null) {
        experimentNodes[0].forEachEntry((s, b) -> {
            b.forEachValue(experimentNode -> {
                addExperiment(s, model, experimentNode, tempSpeciesCache);
                return true;
            });
            return true;
        });
    }
    // Parent the species and the experiments of the model (all are now known).
    speciesNodes.forEachValue(speciesNode -> {
        parentSpecies(model, speciesNode, model, tempSpeciesCache);
        return true;
    });
    if (experimentNodes[0] != null) {
        experimentNodes[0].forEachEntry((s, b) -> {
            b.forEachValue(experimentNode -> {
                parentExperiment(model, experimentNode);
                return true;
            });
            return true;
        });
    }
    // Initialize the hierarchy of types
    model.buildTypes();
    // hqnghi build micro-models as types
    if (mm != null) {
        for (final Entry<String, ModelDescription> entry : mm.entrySet()) {
            model.getTypesManager().alias(entry.getValue().getName(), entry.getKey());
        }
        // end-hqnghi
    }
    // Make species and experiments recursively create their attributes, actions....
    complementSpecies(model, globalNodes);
    speciesNodes.forEachValue(speciesNode -> {
        complementSpecies(model.getMicroSpecies(speciesNode.getName()), speciesNode);
        return true;
    });
    if (experimentNodes[0] != null) {
        experimentNodes[0].forEachEntry((s, b) -> {
            b.forEachValue(experimentNode -> {
                complementSpecies(model.getExperiment(experimentNode.getName()), experimentNode);
                return true;
            });
            return true;
        });
    }
    // Complement recursively the different species (incl. the world). The recursion is hierarchical.
    model.inheritFromParent();
    for (final SpeciesDescription sd : getSpeciesInHierarchicalOrder(model)) {
        sd.inheritFromParent();
        if (sd.isExperiment()) {
            if (!sd.finalizeDescription()) {
                return null;
            }
        }
    }
    // Issue #1708 (put before the finalization)
    if (model.hasFacet(SCHEDULES) || model.hasFacet(FREQUENCY)) {
        createSchedulerSpecies(model);
    }
    if (!model.finalizeDescription()) {
        return null;
    }
    if (document) {
        collector.document(model);
    }
    return model;
}
From source file:com.amazonaws.services.kinesis.stormspout.state.zookeeper.ZookeeperStateManager.java
private ImmutableList<String> getShardAssignment() {
    final ImmutableList.Builder<String> builder = new ImmutableList.Builder<>();
    ImmutableList<String> shardList;
    // Note that this uses ZK, not DescribeStream API. This ensures that all
    // tasks share a consistent (although possibly outdated) view of the stream.
    try {
        shardList = zk.getShardList();
        LOG.info(this + " Got shardList: " + shardList);
    } catch (Exception e) {
        LOG.error(this + " could not compute shard assigment: could not retrieve shard list" + " from ZK.", e);
        throw new KinesisSpoutException(e);
    }
    for (int i = taskIndex; i < shardList.size(); i += totalNumTasks) {
        builder.add(shardList.get(i));
    }
    return builder.build();
}
From source file:com.opengamma.strata.finance.rate.bond.FixedCouponBond.java
@Override
public ExpandedFixedCouponBond expand() {
    Schedule adjustedSchedule = periodicSchedule.createSchedule();
    Schedule unadjustedSchedule = adjustedSchedule.toUnadjusted();
    ImmutableList.Builder<FixedCouponBondPaymentPeriod> accrualPeriods = ImmutableList.builder();
    for (int i = 0; i < adjustedSchedule.size(); i++) {
        SchedulePeriod period = adjustedSchedule.getPeriod(i);
        SchedulePeriod unadjustedPeriod = SchedulePeriod.of(period.getUnadjustedStartDate(),
                period.getUnadjustedEndDate());
        accrualPeriods.add(FixedCouponBondPaymentPeriod.builder()
                .unadjustedStartDate(period.getUnadjustedStartDate())
                .unadjustedEndDate(period.getUnadjustedEndDate())
                .startDate(period.getStartDate())
                .endDate(period.getEndDate())
                .detachmentDate(exCouponPeriod.adjust(period.getEndDate()))
                .notional(notional)
                .currency(currency)
                .fixedRate(fixedRate)
                .yearFraction(unadjustedPeriod.yearFraction(dayCount, unadjustedSchedule))
                .build());
    }
    ImmutableList<FixedCouponBondPaymentPeriod> periodicPayments = accrualPeriods.build();
    FixedCouponBondPaymentPeriod lastPeriod = periodicPayments.get(periodicPayments.size() - 1);
    Payment nominalPayment = Payment.of(CurrencyAmount.of(currency, notional), lastPeriod.getPaymentDate());
    return ExpandedFixedCouponBond.builder()
            .legalEntityId(legalEntityId)
            .nominalPayment(nominalPayment)
            .periodicPayments(ImmutableList.copyOf(periodicPayments))
            .dayCount(dayCount)
            .yieldConvention(yieldConvention)
            .settlementDateOffset(settlementDateOffset)
            .build();
}
From source file:com.freiheit.fuava.simplebatch.processor.RetryingProcessor.java
private Iterable<Result<OriginalItem, Output>> doPersist(final Iterable<Result<OriginalItem, Input>> iterable) {
    final ImmutableList<Result<OriginalItem, Input>> successes = FluentIterable.from(iterable)
            .filter(Result::isSuccess).toList();
    final ImmutableList<Result<OriginalItem, Input>> fails = FluentIterable.from(iterable)
            .filter(Result::isFailed).toList();
    final ImmutableList<Input> outputs = getSuccessOutputs(successes);
    final List<Output> persistenceResults = outputs.isEmpty() ? ImmutableList.of() : apply(outputs);
    if (persistenceResults.size() != outputs.size() || persistenceResults.size() != successes.size()) {
        throw new IllegalStateException("persistence results of unexpected size produced by " + this);
    }
    final ImmutableList.Builder<Result<OriginalItem, Output>> b = ImmutableList.builder();
    for (int i = 0; i < outputs.size(); i++) {
        final Result<OriginalItem, Input> processingResult = successes.get(i);
        final Output persistenceResult = persistenceResults.get(i);
        b.add(Result.<OriginalItem, Output>builder(processingResult).withOutput(persistenceResult).success());
    }
    for (final Result<OriginalItem, Input> failed : fails) {
        b.add(Result.<OriginalItem, Output>builder(failed).failed());
    }
    return b.build();
}
From source file:org.prebake.core.GlobRelation.java
public @Nullable ImmutableList<Solution> allPossibleSolutions() {
    ImmutableList<String> keys;
    ImmutableList<Set<String>> valueSets;
    {
        ImmutableList.Builder<String> kb = ImmutableList.builder();
        ImmutableList.Builder<Set<String>> vb = ImmutableList.builder();
        for (Param p : parameters.values()) {
            if (p.allowedValues == null) {
                return null;
            }
            kb.add(p.name);
            vb.add(p.allowedValues);
        }
        keys = kb.build();
        valueSets = vb.build();
    }
    Map<String, String> bindings = Maps.newLinkedHashMap();
    ImmutableList.Builder<Solution> solutions = ImmutableList.builder();
    int n = keys.size();
    for (List<String> values : Sets.cartesianProduct(valueSets)) {
        for (int i = 0; i < n; ++i) {
            bindings.put(keys.get(i), values.get(i));
            solutions.add(withParameterValues(bindings));
        }
    }
    return solutions.build();
}
From source file:com.google.testing.i18n.sanitycheck.parser.Parser.java
/**
 * Parses a given {@code expected} string and returns a list of the {@link Placeholder} instances
 * that were found.
 *
 * @param actual golden data string that should satisfy a given {@code expected} pattern.
 * @param expected pattern that describes a given {@code actual} data.
 * @return list of {@link Placeholder} entities.
 * @throws ParserException would be thrown if {@code expected} format is wrong or {@code actual}
 *         does not satisfies {@code expected} format
 */
public ImmutableList<Placeholder> parse(String actual, String expected) throws ParserException {
    Matcher placeholderMatcher = PLACEHOLDER_PATTERN.matcher(expected);
    String extractionPatternText = buildExtractionPattern(placeholderMatcher, expected);
    placeholderMatcher.reset();
    ImmutableList<String> rawPlaceholders = extractPlaceholders(placeholderMatcher);
    Pattern extractionPattern = Pattern.compile(extractionPatternText);
    // Extracting actual values from the given expected result.
    Matcher extractionMatcher = extractionPattern.matcher(actual);
    int extractedGroups = extractionMatcher.groupCount();
    ImmutableList.Builder<Placeholder> placeholderTokens = ImmutableList.builder();
    if (extractionMatcher.find()) {
        if (extractedGroups != rawPlaceholders.size()) {
            // Should never happen. The number of groups is always equal to the number of placeholders.
            throw new ParserException(String.format("Actual result does not satisfies the expected one."));
        }
        for (int i = 1; i <= extractedGroups; i++) {
            String rawPlaceholder = rawPlaceholders.get(i - 1);
            String actualPlaceholderContent = extractionMatcher.group(i);
            if (rawPlaceholder.equals(actualPlaceholderContent)) {
                continue;
            }
            placeholderTokens.add(buildPlaceholder(rawPlaceholder, actualPlaceholderContent));
        }
        return placeholderTokens.build();
    }
    throw new ParserException(String.format(
            "Actual result does not satisfies the expected one. "
                    + "Actual text \"%s\" is not parsable by means of the generated pattern \"%s\"",
            actual, expected));
}
From source file:org.geogit.rest.repository.MergeFeatureResource.java
public void post(Representation entity) {
    InputStream input = null;
    try {
        input = getRequest().getEntity().getStream();
        final GeoGIT ggit = getGeogit(getRequest()).get();
        final Reader body = new InputStreamReader(input);
        final JsonParser parser = new JsonParser();
        final JsonElement conflictJson = parser.parse(body);
        if (conflictJson.isJsonObject()) {
            final JsonObject conflict = conflictJson.getAsJsonObject();
            String featureId = null;
            RevFeature ourFeature = null;
            RevFeatureType ourFeatureType = null;
            RevFeature theirFeature = null;
            RevFeatureType theirFeatureType = null;
            JsonObject merges = null;
            if (conflict.has("path") && conflict.get("path").isJsonPrimitive()) {
                featureId = conflict.get("path").getAsJsonPrimitive().getAsString();
            }
            Preconditions.checkState(featureId != null);
            if (conflict.has("ours") && conflict.get("ours").isJsonPrimitive()) {
                String ourCommit = conflict.get("ours").getAsJsonPrimitive().getAsString();
                Optional<NodeRef> ourNode = parseID(ObjectId.valueOf(ourCommit), featureId, ggit);
                if (ourNode.isPresent()) {
                    Optional<RevObject> object = ggit.command(RevObjectParse.class)
                            .setObjectId(ourNode.get().objectId()).call();
                    Preconditions.checkState(object.isPresent() && object.get() instanceof RevFeature);
                    ourFeature = (RevFeature) object.get();
                    object = ggit.command(RevObjectParse.class).setObjectId(ourNode.get().getMetadataId()).call();
                    Preconditions.checkState(object.isPresent() && object.get() instanceof RevFeatureType);
                    ourFeatureType = (RevFeatureType) object.get();
                }
            }
            if (conflict.has("theirs") && conflict.get("theirs").isJsonPrimitive()) {
                String theirCommit = conflict.get("theirs").getAsJsonPrimitive().getAsString();
                Optional<NodeRef> theirNode = parseID(ObjectId.valueOf(theirCommit), featureId, ggit);
                if (theirNode.isPresent()) {
                    Optional<RevObject> object = ggit.command(RevObjectParse.class)
                            .setObjectId(theirNode.get().objectId()).call();
                    Preconditions.checkState(object.isPresent() && object.get() instanceof RevFeature);
                    theirFeature = (RevFeature) object.get();
                    object = ggit.command(RevObjectParse.class).setObjectId(theirNode.get().getMetadataId()).call();
                    Preconditions.checkState(object.isPresent() && object.get() instanceof RevFeatureType);
                    theirFeatureType = (RevFeatureType) object.get();
                }
            }
            if (conflict.has("merges") && conflict.get("merges").isJsonObject()) {
                merges = conflict.get("merges").getAsJsonObject();
            }
            Preconditions.checkState(merges != null);
            Preconditions.checkState(ourFeatureType != null || theirFeatureType != null);
            SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder(
                    (SimpleFeatureType) (ourFeatureType != null ? ourFeatureType.type() : theirFeatureType.type()));
            ImmutableList<PropertyDescriptor> descriptors = (ourFeatureType == null ? theirFeatureType
                    : ourFeatureType).sortedDescriptors();
            for (Entry<String, JsonElement> entry : merges.entrySet()) {
                int descriptorIndex = getDescriptorIndex(entry.getKey(), descriptors);
                if (descriptorIndex != -1 && entry.getValue().isJsonObject()) {
                    PropertyDescriptor descriptor = descriptors.get(descriptorIndex);
                    JsonObject attributeObject = entry.getValue().getAsJsonObject();
                    if (attributeObject.has("ours") && attributeObject.get("ours").isJsonPrimitive()
                            && attributeObject.get("ours").getAsBoolean()) {
                        featureBuilder.set(descriptor.getName(),
                                ourFeature == null ? null : ourFeature.getValues().get(descriptorIndex).orNull());
                    } else if (attributeObject.has("theirs") && attributeObject.get("theirs").isJsonPrimitive()
                            && attributeObject.get("theirs").getAsBoolean()) {
                        featureBuilder.set(descriptor.getName(),
                                theirFeature == null ? null : theirFeature.getValues().get(descriptorIndex).orNull());
                    } else if (attributeObject.has("value") && attributeObject.get("value").isJsonPrimitive()) {
                        JsonPrimitive primitive = attributeObject.get("value").getAsJsonPrimitive();
                        if (primitive.isString()) {
                            try {
                                Object object = valueFromString(
                                        FieldType.forBinding(descriptor.getType().getBinding()),
                                        primitive.getAsString());
                                featureBuilder.set(descriptor.getName(), object);
                            } catch (Exception e) {
                                throw new Exception("Unable to convert attribute (" + entry.getKey()
                                        + ") to required type: " + descriptor.getType().getBinding().toString());
                            }
                        } else if (primitive.isNumber()) {
                            try {
                                Object value = valueFromNumber(
                                        FieldType.forBinding(descriptor.getType().getBinding()),
                                        primitive.getAsNumber());
                                featureBuilder.set(descriptor.getName(), value);
                            } catch (Exception e) {
                                throw new Exception("Unable to convert attribute (" + entry.getKey()
                                        + ") to required type: " + descriptor.getType().getBinding().toString());
                            }
                        } else if (primitive.isBoolean()) {
                            try {
                                Object value = valueFromBoolean(
                                        FieldType.forBinding(descriptor.getType().getBinding()),
                                        primitive.getAsBoolean());
                                featureBuilder.set(descriptor.getName(), value);
                            } catch (Exception e) {
                                throw new Exception("Unable to convert attribute (" + entry.getKey()
                                        + ") to required type: " + descriptor.getType().getBinding().toString());
                            }
                        } else if (primitive.isJsonNull()) {
                            featureBuilder.set(descriptor.getName(), null);
                        } else {
                            throw new Exception(
                                    "Unsupported JSON type for attribute value (" + entry.getKey() + ")");
                        }
                    }
                }
            }
            SimpleFeature feature = featureBuilder.buildFeature(NodeRef.nodeFromPath(featureId));
            RevFeature revFeature = RevFeatureBuilder.build(feature);
            ggit.getRepository().getIndex().getDatabase().put(revFeature);
            getResponse().setEntity(new StringRepresentation(revFeature.getId().toString(), MediaType.TEXT_PLAIN));
        }
    } catch (Exception e) {
        throw new RestletException(e.getMessage(), Status.SERVER_ERROR_INTERNAL, e);
    } finally {
        if (input != null)
            Closeables.closeQuietly(input);
    }
}
From source file:org.locationtech.geogig.rest.repository.MergeFeatureResource.java
public void post(Representation entity) {
    InputStream input = null;
    try {
        input = getRequest().getEntity().getStream();
        final GeoGIG ggit = getGeogig(getRequest()).get();
        final Reader body = new InputStreamReader(input);
        final JsonParser parser = new JsonParser();
        final JsonElement conflictJson = parser.parse(body);
        if (conflictJson.isJsonObject()) {
            final JsonObject conflict = conflictJson.getAsJsonObject();
            String featureId = null;
            RevFeature ourFeature = null;
            RevFeatureType ourFeatureType = null;
            RevFeature theirFeature = null;
            RevFeatureType theirFeatureType = null;
            JsonObject merges = null;
            if (conflict.has("path") && conflict.get("path").isJsonPrimitive()) {
                featureId = conflict.get("path").getAsJsonPrimitive().getAsString();
            }
            Preconditions.checkState(featureId != null);
            if (conflict.has("ours") && conflict.get("ours").isJsonPrimitive()) {
                String ourCommit = conflict.get("ours").getAsJsonPrimitive().getAsString();
                Optional<NodeRef> ourNode = parseID(ObjectId.valueOf(ourCommit), featureId, ggit);
                if (ourNode.isPresent()) {
                    Optional<RevObject> object = ggit.command(RevObjectParse.class)
                            .setObjectId(ourNode.get().objectId()).call();
                    Preconditions.checkState(object.isPresent() && object.get() instanceof RevFeature);
                    ourFeature = (RevFeature) object.get();
                    object = ggit.command(RevObjectParse.class).setObjectId(ourNode.get().getMetadataId()).call();
                    Preconditions.checkState(object.isPresent() && object.get() instanceof RevFeatureType);
                    ourFeatureType = (RevFeatureType) object.get();
                }
            }
            if (conflict.has("theirs") && conflict.get("theirs").isJsonPrimitive()) {
                String theirCommit = conflict.get("theirs").getAsJsonPrimitive().getAsString();
                Optional<NodeRef> theirNode = parseID(ObjectId.valueOf(theirCommit), featureId, ggit);
                if (theirNode.isPresent()) {
                    Optional<RevObject> object = ggit.command(RevObjectParse.class)
                            .setObjectId(theirNode.get().objectId()).call();
                    Preconditions.checkState(object.isPresent() && object.get() instanceof RevFeature);
                    theirFeature = (RevFeature) object.get();
                    object = ggit.command(RevObjectParse.class).setObjectId(theirNode.get().getMetadataId()).call();
                    Preconditions.checkState(object.isPresent() && object.get() instanceof RevFeatureType);
                    theirFeatureType = (RevFeatureType) object.get();
                }
            }
            if (conflict.has("merges") && conflict.get("merges").isJsonObject()) {
                merges = conflict.get("merges").getAsJsonObject();
            }
            Preconditions.checkState(merges != null);
            Preconditions.checkState(ourFeatureType != null || theirFeatureType != null);
            SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder(
                    (SimpleFeatureType) (ourFeatureType != null ? ourFeatureType.type() : theirFeatureType.type()));
            ImmutableList<PropertyDescriptor> descriptors = (ourFeatureType == null ? theirFeatureType
                    : ourFeatureType).sortedDescriptors();
            for (Entry<String, JsonElement> entry : merges.entrySet()) {
                int descriptorIndex = getDescriptorIndex(entry.getKey(), descriptors);
                if (descriptorIndex != -1 && entry.getValue().isJsonObject()) {
                    PropertyDescriptor descriptor = descriptors.get(descriptorIndex);
                    JsonObject attributeObject = entry.getValue().getAsJsonObject();
                    if (attributeObject.has("ours") && attributeObject.get("ours").isJsonPrimitive()
                            && attributeObject.get("ours").getAsBoolean()) {
                        featureBuilder.set(descriptor.getName(),
                                ourFeature == null ? null : ourFeature.getValues().get(descriptorIndex).orNull());
                    } else if (attributeObject.has("theirs") && attributeObject.get("theirs").isJsonPrimitive()
                            && attributeObject.get("theirs").getAsBoolean()) {
                        featureBuilder.set(descriptor.getName(),
                                theirFeature == null ? null : theirFeature.getValues().get(descriptorIndex).orNull());
                    } else if (attributeObject.has("value") && attributeObject.get("value").isJsonPrimitive()) {
                        JsonPrimitive primitive = attributeObject.get("value").getAsJsonPrimitive();
                        if (primitive.isString()) {
                            try {
                                Object object = valueFromString(
                                        FieldType.forBinding(descriptor.getType().getBinding()),
                                        primitive.getAsString());
                                featureBuilder.set(descriptor.getName(), object);
                            } catch (Exception e) {
                                throw new Exception("Unable to convert attribute (" + entry.getKey()
                                        + ") to required type: " + descriptor.getType().getBinding().toString());
                            }
                        } else if (primitive.isNumber()) {
                            try {
                                Object value = valueFromNumber(
                                        FieldType.forBinding(descriptor.getType().getBinding()),
                                        primitive.getAsNumber());
                                featureBuilder.set(descriptor.getName(), value);
                            } catch (Exception e) {
                                throw new Exception("Unable to convert attribute (" + entry.getKey()
                                        + ") to required type: " + descriptor.getType().getBinding().toString());
                            }
                        } else if (primitive.isBoolean()) {
                            try {
                                Object value = valueFromBoolean(
                                        FieldType.forBinding(descriptor.getType().getBinding()),
                                        primitive.getAsBoolean());
                                featureBuilder.set(descriptor.getName(), value);
                            } catch (Exception e) {
                                throw new Exception("Unable to convert attribute (" + entry.getKey()
                                        + ") to required type: " + descriptor.getType().getBinding().toString());
                            }
                        } else if (primitive.isJsonNull()) {
                            featureBuilder.set(descriptor.getName(), null);
                        } else {
                            throw new Exception(
                                    "Unsupported JSON type for attribute value (" + entry.getKey() + ")");
                        }
                    }
                }
            }
            SimpleFeature feature = featureBuilder.buildFeature(NodeRef.nodeFromPath(featureId));
            RevFeature revFeature = RevFeatureBuilder.build(feature);
            ggit.getRepository().objectDatabase().put(revFeature);
            getResponse().setEntity(new StringRepresentation(revFeature.getId().toString(), MediaType.TEXT_PLAIN));
        }
    } catch (Exception e) {
        throw new RestletException(e.getMessage(), Status.SERVER_ERROR_INTERNAL, e);
    } finally {
        if (input != null)
            Closeables.closeQuietly(input);
    }
}
From source file:org.locationtech.geogig.spring.service.LegacyMergeFeatureService.java
public RevFeature mergeFeatures(RepositoryProvider provider, String repoName, String request) {
    // get the repo
    Repository repository = getRepository(provider, repoName);
    if (repository != null) {
        final JsonParser parser = new JsonParser();
        final JsonElement conflictJson;
        try {
            conflictJson = parser.parse(request);
        } catch (Exception e) {
            invalidPostData();
            return null;
        }
        if (conflictJson.isJsonObject()) {
            final JsonObject conflict = conflictJson.getAsJsonObject();
            String featureId = null;
            RevFeature ourFeature = null;
            RevFeatureType ourFeatureType = null;
            RevFeature theirFeature = null;
            RevFeatureType theirFeatureType = null;
            JsonObject merges = null;
            if (conflict.has("path") && conflict.get("path").isJsonPrimitive()) {
                featureId = conflict.get("path").getAsJsonPrimitive().getAsString();
            }
            if (featureId == null) {
                invalidPostData();
            }
            if (conflict.has("ours") && conflict.get("ours").isJsonPrimitive()) {
                String ourCommit = conflict.get("ours").getAsJsonPrimitive().getAsString();
                Optional<NodeRef> ourNode = parseID(ObjectId.valueOf(ourCommit), featureId, repository);
                if (ourNode.isPresent()) {
                    Optional<RevObject> object = repository.command(RevObjectParse.class)
                            .setObjectId(ourNode.get().getObjectId()).call();
                    Preconditions.checkState(object.isPresent() && object.get() instanceof RevFeature);
                    ourFeature = (RevFeature) object.get();
                    object = repository.command(RevObjectParse.class)
                            .setObjectId(ourNode.get().getMetadataId()).call();
                    Preconditions.checkState(object.isPresent() && object.get() instanceof RevFeatureType);
                    ourFeatureType = (RevFeatureType) object.get();
                }
            } else {
                invalidPostData();
            }
            if (conflict.has("theirs") && conflict.get("theirs").isJsonPrimitive()) {
                String theirCommit = conflict.get("theirs").getAsJsonPrimitive().getAsString();
                Optional<NodeRef> theirNode = parseID(ObjectId.valueOf(theirCommit), featureId, repository);
                if (theirNode.isPresent()) {
                    Optional<RevObject> object = repository.command(RevObjectParse.class)
                            .setObjectId(theirNode.get().getObjectId()).call();
                    Preconditions.checkState(object.isPresent() && object.get() instanceof RevFeature);
                    theirFeature = (RevFeature) object.get();
                    object = repository.command(RevObjectParse.class)
                            .setObjectId(theirNode.get().getMetadataId()).call();
                    Preconditions.checkState(object.isPresent() && object.get() instanceof RevFeatureType);
                    theirFeatureType = (RevFeatureType) object.get();
                }
            } else {
                invalidPostData();
            }
            if (conflict.has("merges") && conflict.get("merges").isJsonObject()) {
                merges = conflict.get("merges").getAsJsonObject();
            }
            if (merges == null) {
                invalidPostData();
            }
            Preconditions.checkState(ourFeatureType != null || theirFeatureType != null);
            SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder(
                    (SimpleFeatureType) (ourFeatureType != null ? ourFeatureType.type() : theirFeatureType.type()));
            ImmutableList<PropertyDescriptor> descriptors = (ourFeatureType == null ? theirFeatureType
                    : ourFeatureType).descriptors();
            for (Map.Entry<String, JsonElement> entry : merges.entrySet()) {
                int descriptorIndex = getDescriptorIndex(entry.getKey(), descriptors);
                if (descriptorIndex != -1 && entry.getValue().isJsonObject()) {
                    PropertyDescriptor descriptor = descriptors.get(descriptorIndex);
                    JsonObject attributeObject = entry.getValue().getAsJsonObject();
                    if (attributeObject.has("ours") && attributeObject.get("ours").isJsonPrimitive()
                            && attributeObject.get("ours").getAsBoolean()) {
                        featureBuilder.set(descriptor.getName(),
                                ourFeature == null ? null : ourFeature.get(descriptorIndex).orNull());
                    } else if (attributeObject.has("theirs") && attributeObject.get("theirs").isJsonPrimitive()
                            && attributeObject.get("theirs").getAsBoolean()) {
                        featureBuilder.set(descriptor.getName(),
                                theirFeature == null ? null : theirFeature.get(descriptorIndex).orNull());
                    } else if (attributeObject.has("value") && attributeObject.get("value").isJsonPrimitive()) {
                        JsonPrimitive primitive = attributeObject.get("value").getAsJsonPrimitive();
                        if (primitive.isString()) {
                            try {
                                Object object = valueFromString(
                                        FieldType.forBinding(descriptor.getType().getBinding()),
                                        primitive.getAsString());
                                featureBuilder.set(descriptor.getName(), object);
                            } catch (Exception e) {
                                throw new CommandSpecException("Unable to convert attribute (" + entry.getKey()
                                        + ") to required type: " + descriptor.getType().getBinding().toString(),
                                        HttpStatus.INTERNAL_SERVER_ERROR);
                            }
                        } else if (primitive.isNumber()) {
                            try {
                                Object value = valueFromNumber(
                                        FieldType.forBinding(descriptor.getType().getBinding()),
                                        primitive.getAsNumber());
                                featureBuilder.set(descriptor.getName(), value);
                            } catch (Exception e) {
                                throw new CommandSpecException("Unable to convert attribute (" + entry.getKey()
                                        + ") to required type: " + descriptor.getType().getBinding().toString(),
                                        HttpStatus.INTERNAL_SERVER_ERROR);
                            }
                        } else if (primitive.isBoolean()) {
                            try {
                                Object value = valueFromBoolean(
                                        FieldType.forBinding(descriptor.getType().getBinding()),
                                        primitive.getAsBoolean());
                                featureBuilder.set(descriptor.getName(), value);
                            } catch (Exception e) {
                                throw new CommandSpecException("Unable to convert attribute (" + entry.getKey()
                                        + ") to required type: " + descriptor.getType().getBinding().toString(),
                                        HttpStatus.INTERNAL_SERVER_ERROR);
                            }
                        } else if (primitive.isJsonNull()) {
                            featureBuilder.set(descriptor.getName(), null);
                        } else {
                            throw new CommandSpecException(
                                    "Unsupported JSON type for attribute value (" + entry.getKey() + ")",
                                    HttpStatus.INTERNAL_SERVER_ERROR);
                        }
                    }
                }
            }
            SimpleFeature feature = featureBuilder.buildFeature(NodeRef.nodeFromPath(featureId));
            RevFeature revFeature = RevFeatureBuilder.build(feature);
            repository.objectDatabase().put(revFeature);
            return revFeature;
        } else {
            invalidPostData();
        }
    }
    return null;
}