Example usage for com.google.common.collect Maps newLinkedHashMap

Introduction

This page lists example usages of com.google.common.collect.Maps.newLinkedHashMap, drawn from open-source projects.

Prototype

public static <K, V> LinkedHashMap<K, V> newLinkedHashMap() 

Document

Creates a mutable, empty, insertion-ordered LinkedHashMap instance.
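
A minimal sketch (not taken from the projects listed below; the class name is arbitrary) showing the factory method in isolation. It returns a mutable, initially empty map whose iteration order matches insertion order:

import com.google.common.collect.Maps;

import java.util.Map;

public class NewLinkedHashMapExample {
    public static void main(String[] args) {
        // Equivalent to new LinkedHashMap<String, Integer>(), with the type arguments inferred.
        Map<String, Integer> counts = Maps.newLinkedHashMap();
        counts.put("first", 1);
        counts.put("second", 2);
        counts.put("third", 3);

        // Iteration follows insertion order: first, second, third.
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }
}

Note that on Java 7 and later the diamond syntax (new LinkedHashMap<>()) provides the same type inference, so the factory method is largely a stylistic choice in modern code.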

Usage

From source file:com.opengamma.financial.analytics.model.volatility.surface.black.defaultproperties.PureBlackVolatilitySurfaceDefaults.java

public PureBlackVolatilitySurfaceDefaults(final ComputationTargetType target,
        final String... defaultsPerTicker) {
    super(target, true);
    ArgumentChecker.notNull(defaultsPerTicker, "defaults per ticker");
    final int n = defaultsPerTicker.length;
    ArgumentChecker.isTrue(n % 5 == 0,
            "Need one discounting curve name, curve currency, curve calculation config name and surface name per ticker value");
    _tickerToCurveName = Maps.newLinkedHashMap();
    _tickerToCurveCurrency = Maps.newLinkedHashMap();
    _tickerToCurveCalculationConfig = Maps.newLinkedHashMap();
    _tickerToSurfaceName = Maps.newLinkedHashMap();
    for (int i = 0; i < n; i += 5) {
        final String ticker = defaultsPerTicker[i];
        _tickerToCurveName.put(ticker, defaultsPerTicker[i + 1]);
        _tickerToCurveCurrency.put(ticker, defaultsPerTicker[i + 2]);
        _tickerToCurveCalculationConfig.put(ticker, defaultsPerTicker[i + 3]);
        _tickerToSurfaceName.put(ticker, defaultsPerTicker[i + 4]);
    }
}

From source file:org.apache.aurora.common.net.http.handlers.VarsJsonHandler.java

@VisibleForTesting
String getBody(boolean pretty) {
    Map<String, Object> vars = Maps.newLinkedHashMap();
    for (Stat<?> var : statSupplier.get()) {
        vars.put(var.getName(), var.read());
    }
    // TODO(wfarner): Let the jax-rs provider handle serialization.
    return getGson(pretty).toJson(vars);
}

From source file:com.netflix.bdp.s3.S3Util.java

public static PendingUpload multipartUpload(AmazonS3 client, File localFile, String partition, String bucket,
        String key, long uploadPartSize) {

    InitiateMultipartUploadResult initiate = client
            .initiateMultipartUpload(new InitiateMultipartUploadRequest(bucket, key));
    String uploadId = initiate.getUploadId();

    boolean threw = true;
    try {
        Map<Integer, String> etags = Maps.newLinkedHashMap();

        long offset = 0;
        long numParts = (localFile.length() / uploadPartSize
                + ((localFile.length() % uploadPartSize) > 0 ? 1 : 0));

        Preconditions.checkArgument(numParts > 0, "Cannot upload 0 byte file: " + localFile);

        for (int partNumber = 1; partNumber <= numParts; partNumber += 1) {
            long size = Math.min(localFile.length() - offset, uploadPartSize);
            UploadPartRequest part = new UploadPartRequest().withBucketName(bucket).withKey(key)
                    .withPartNumber(partNumber).withUploadId(uploadId).withFile(localFile)
                    .withFileOffset(offset).withPartSize(size).withLastPart(partNumber == numParts);

            UploadPartResult partResult = client.uploadPart(part);
            PartETag etag = partResult.getPartETag();
            etags.put(etag.getPartNumber(), etag.getETag());

            offset += uploadPartSize;
        }

        PendingUpload pending = new PendingUpload(partition, bucket, key, uploadId, etags);

        threw = false;

        return pending;

    } finally {
        if (threw) {
            try {
                client.abortMultipartUpload(new AbortMultipartUploadRequest(bucket, key, uploadId));
            } catch (AmazonClientException e) {
                LOG.error("Failed to abort multi-part upload", e);
            }
        }
    }
}

From source file:org.jetbrains.jet.lang.resolve.calls.inference.DebugConstraintResolutionListener.java

@Override
public void constraintsForUnknown(TypeParameterDescriptor typeParameterDescriptor, BoundsOwner typeValue) {
    if (!ResolutionDebugInfo.isResolutionDebugEnabled())
        return;
    Map<TypeParameterDescriptor, BoundsOwner> map = debugInfo.getByKey(BOUNDS_FOR_UNKNOWNS, candidateCall);
    if (map == null) {
        map = Maps.newLinkedHashMap();
        debugInfo.putByKey(BOUNDS_FOR_UNKNOWNS, candidateCall, map);
    }
    map.put(typeParameterDescriptor, typeValue);
}

From source file:org.eclipse.xtext.ide.serializer.impl.RelatedResourcesProvider.java

public List<RelatedResource> getRelatedResources(Collection<IResourceSnapshot> snapshots) {
    Map<URI, RelatedResource> result = Maps.newLinkedHashMap();
    for (IResourceSnapshot res : snapshots) {
        for (IEObjectSnapshot obj : res.getObjects().values()) {
            for (IReferenceSnapshot ref : obj.getIncomingReferences()) {
                URI source = ref.getSourceEObjectUri().trimFragment();
                RelatedResource related = result.get(source);
                if (related == null) {
                    related = new RelatedResource(source);
                    result.put(source, related);
                }
                related.outgoingReferences.add(ref);
            }
        }
    }
    Set<URI> added = Sets.newHashSet();
    Set<URI> removed = Sets.newHashSet();
    for (IResourceSnapshot res : snapshots) {
        URI oldUri = res.getURI();
        URI newUri = res.getResource().getURI();
        if (!oldUri.equals(newUri)) {
            added.add(newUri);
            removed.add(oldUri);
        } else {
            result.remove(newUri);
        }
    }
    removed.removeAll(added);
    for (URI uri : removed) {
        result.remove(uri);
    }
    return ImmutableList.copyOf(result.values());
}

From source file:com.google.api.tools.framework.importers.swagger.aspects.auth.model.SecurityRequirementModel.java

public static Map<String, SecurityRequirementModel> mergeSecurityRequirementModel(
        Map<String, SecurityRequirementModel> securityRequirementsFromCustomExtension,
        Map<String, SecurityRequirementModel> securityRequirementsFromSecurityObject) {
    if (securityRequirementsFromCustomExtension == null && securityRequirementsFromSecurityObject == null) {
        return null;
    }

    Map<String, SecurityRequirementModel> result = Maps.newLinkedHashMap();
    if (securityRequirementsFromSecurityObject != null) {
        result.putAll(securityRequirementsFromSecurityObject);
    }
    // Overwrite if the same definition is referenced inside the custom extension.
    if (securityRequirementsFromCustomExtension != null) {
        result.putAll(securityRequirementsFromCustomExtension);
    }

    return result;
}

From source file:org.apache.whirr.service.jclouds.VariablesToExport.java

@Override
public Map<String, String> get() {
    Map<String, String> metadataMap = Maps.newLinkedHashMap();

    addEnvironmentVariablesFromClusterSpec(metadataMap);
    addDefaultEnvironmentVariablesForInstance(metadataMap, instance);
    metadataMap.putAll(exports);
    addPerInstanceCustomEnvironmentVariables(metadataMap, instance);

    return metadataMap;
}

From source file:org.valens.bamboo.configuration.TemplateConfigurator.java

@Override
public void populateContextForEdit(@NotNull Map<String, Object> context, @NotNull BuildConfiguration bc,
        @Nullable Plan plan) {//  w  ww  .ja  va  2s.com

    Map<String, String> result = Maps.newLinkedHashMap();
    super.populateContextForEdit(context, bc, plan);

    if (planManager != null) {
        for (TopLevelPlan p : planManager.getAllPlansUnrestricted()) {
            boolean state = true;

            if (p.getBuildDefinition().getCustomConfiguration().get(SELECTED_TEMPLATE_ENABLED) == null
                    || p.getBuildDefinition().getCustomConfiguration().get(SELECTED_TEMPLATE_ENABLED).toString()
                            .equalsIgnoreCase("false")) {
                state = false;
            }

            if (state) {
                for (Job j : p.getAllJobs()) {
                    log.debug("populateContextForEdit  " + p.getProject() + " - " + p.getName());
                    result.put(j.getName(), j.getKey());
                }
            }
        }
    }

    context.put("templates", result);
}

From source file:brooklyn.entity.group.QuarantineGroupImpl.java

@Override
public void expungeMembers(boolean stopFirst) {
    Set<Entity> members = ImmutableSet.copyOf(getMembers());
    RuntimeException exception = null;
    if (stopFirst) {
        Map<Entity, Task<?>> tasks = Maps.newLinkedHashMap();
        for (Entity member : members) {
            if (member instanceof Startable) {
                Task<Void> task = Effectors.invocation(member, Startable.STOP, ImmutableMap.of()).asTask();
                tasks.put(member, task);
            }
        }
        DynamicTasks.queueIfPossible(
                Tasks.parallel("stopping " + tasks.size() + " member" + Strings.s(tasks.size()) + " (parallel)",
                        tasks.values()))
                .orSubmitAsync(this);
        try {
            waitForTasksOnExpungeMembers(tasks);
        } catch (RuntimeException e) {
            Exceptions.propagateIfFatal(e);
            exception = e;
            LOG.warn("Problem stopping members of quarantine group " + this
                    + " (rethrowing after unmanaging members): " + e);
        }
    }
    for (Entity member : members) {
        removeMember(member);
        Entities.unmanage(member);
    }
    if (exception != null) {
        throw exception;
    }
}

From source file:cc.kave.episodes.export.StreamPartition.java

public Map<String, Integer> partition() throws ZipException, IOException {
    Map<String, Integer> mapping = Maps.newLinkedHashMap();
    EventStreamGenerator generator = new EventStreamGenerator();
    Map<Event, Integer> allEvents = getStream();

    String zipName = "";
    int partitionNo = 0;

    for (String zip : findZips()) {
        zipName = zip.toString();

        if (REPO.equalsIgnoreCase("")) {
            partitionNo++;
            Logger.log("Partition %d", partitionNo);

            REPO = getRepoName(zipName);
        }
        if (!zipName.startsWith(REPO)) {
            mapping.put(REPO, partitionNo);
            REPO = getRepoName(zipName);
            storeStream(generator, allEvents, partitionNo);

            partitionNo++;
            Logger.log("Partition %d", partitionNo);
            generator = new EventStreamGenerator();
        }
        ReadingArchive ra = rootDir.getReadingArchive(zip);

        while (ra.hasNext()) {
            Context ctx = ra.getNext(Context.class);
            if (ctx == null) {
                continue;
            }
            generator.add(ctx);
        }
        ra.close();
    }
    Logger.log("Total number of partitions is: %d", partitionNo);
    storeStream(generator, allEvents, partitionNo);
    mapping.put(REPO, partitionNo);
    return mapping;
}