Example usage for com.google.common.collect ImmutableMap.Builder putAll

Introduction

On this page you can find example usages of com.google.common.collect ImmutableMap.Builder putAll, collected from open-source projects.

Prototype

public Builder<K, V> putAll(Map<? extends K, ? extends V> map)
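
A short, hedged example before the real-world usages below (class and variable names here are illustrative, not taken from any of the projects that follow): putAll copies every entry of an existing map into the builder, after which further entries can be added before build() produces the immutable map.

import com.google.common.collect.ImmutableMap;
import java.util.Map;

public class PutAllExample {
    public static void main(String[] args) {
        Map<String, String> defaults = ImmutableMap.of("host", "localhost", "port", "8080");

        // Copy all entries from the existing map, then add one more key.
        ImmutableMap<String, String> config = ImmutableMap.<String, String>builder()
                .putAll(defaults)
                .put("scheme", "https")
                .build();

        System.out.println(config); // {host=localhost, port=8080, scheme=https}
    }
}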

Usage

From source file:com.nesscomputing.config.FixedConfigModule.java

public FixedConfigModule(Map<String, String> configOverrides) {
    final ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
    builder.putAll(configOverrides);
    this.config = Config.getFixedConfig(new SystemConfiguration(), new MapConfiguration(builder.build()));
}

From source file:org.jclouds.elastichosts.functions.CreateDriveRequestToMap.java

@Override
public Map<String, String> apply(CreateDriveRequest from) {
    ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
    builder.putAll(baseDriveToMap.apply(from));
    if (from.getAvoid().size() != 0)
        builder.put("avoid", Joiner.on(' ').join(from.getAvoid()));
    if (from.getEncryptionCipher() != null)
        builder.put("encryption:cipher", from.getEncryptionCipher());
    return builder.build();
}

From source file:com.spotify.apollo.http.server.HttpRequest.java

@Override
public Request withHeader(String name, String value) {
    ImmutableMap.Builder<String, String> headers = ImmutableMap.builder();
    headers.putAll(headers());
    headers.put(name, value);
    return create(method(), uri(), payload(), service(), parameters(), headers.build());
}
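
One thing to note about this pattern: ImmutableMap.Builder rejects duplicate keys, so build() throws an IllegalArgumentException when the header named by name is already present in headers(). Below is a hedged sketch of a variant that tolerates the override, assuming Guava 31.0 or later where buildKeepingLast() is available; the helper name and parameters are illustrative, not part of the Apollo code above.

import com.google.common.collect.ImmutableMap;
import java.util.Map;

class HeaderMergeSketch {
    // Returns a copy of 'headers' with 'name' mapped to 'value'.
    // buildKeepingLast() keeps the last value put for each key, so an
    // already-present header is overridden instead of triggering the
    // duplicate-key IllegalArgumentException that build() would throw.
    static Map<String, String> withHeader(Map<String, String> headers, String name, String value) {
        return ImmutableMap.<String, String>builder()
                .putAll(headers)
                .put(name, value)
                .buildKeepingLast();
    }
}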

From source file:org.jclouds.elasticstack.functions.CreateDriveRequestToMap.java

@Override
public Map<String, String> apply(Drive from) {
    ImmutableMap.Builder<String, String> builder = ImmutableMap.builder();
    builder.putAll(baseDriveToMap.apply(from));
    if (from instanceof CreateDriveRequest) {
        CreateDriveRequest create = CreateDriveRequest.class.cast(from);
        if (create.getAvoid().size() != 0)
            builder.put("avoid", Joiner.on(' ').join(create.getAvoid()));
        if (create.getEncryptionCipher() != null)
            builder.put("encryption:cipher", create.getEncryptionCipher());
    }
    return builder.build();
}

From source file:com.google.devtools.build.lib.exec.local.PosixLocalEnvProvider.java

/**
 * Compute an environment map for local actions on Unix-like platforms (e.g. Linux, macOS).
 *
 * <p>Returns a map with the same keys and values as {@code env}. Overrides the value of TMPDIR
 * (or adds it if not present in {@code env}) by the value of {@code clientEnv.get("TMPDIR")}, or
 * if that's empty or null, then by "/tmp".
 */
@Override
public Map<String, String> rewriteLocalEnv(Map<String, String> env, Path execRoot, String fallbackTmpDir) {
    ImmutableMap.Builder<String, String> result = ImmutableMap.builder();
    result.putAll(Maps.filterKeys(env, k -> !k.equals("TMPDIR")));
    String p = clientEnv.get("TMPDIR");
    if (Strings.isNullOrEmpty(p)) {
        // Do not use `fallbackTmpDir`, use `/tmp` instead. This way if the user didn't export TMPDIR
        // in their environment, Bazel will still set a TMPDIR that's Posixy enough and plays well
        // with heavily path-length-limited scenarios, such as the socket creation scenario that
        // motivated https://github.com/bazelbuild/bazel/issues/4376.
        p = "/tmp";
    }
    result.put("TMPDIR", p);
    return result.build();
}
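
This example (and the Windows variant further down) sidesteps the duplicate-key restriction differently: the key to be overridden is filtered out of the source map with Maps.filterKeys before putAll, and the new value is put afterwards. A minimal sketch of that pattern in isolation, with an illustrative helper name:

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import java.util.Map;

class OverrideKeySketch {
    // Copies 'env' but forces TMPDIR to the given value: any existing TMPDIR
    // entry is filtered out before putAll, so the later put() cannot create
    // a duplicate key.
    static Map<String, String> withTmpdir(Map<String, String> env, String tmpdir) {
        ImmutableMap.Builder<String, String> result = ImmutableMap.builder();
        result.putAll(Maps.filterKeys(env, k -> !k.equals("TMPDIR")));
        result.put("TMPDIR", tmpdir);
        return result.build();
    }
}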

From source file:com.facebook.presto.orc.checkpoint.Checkpoints.java

public static Map<StreamId, StreamCheckpoint> getStreamCheckpoints(Set<Integer> columns,
        List<OrcType> columnTypes, CompressionKind compressionKind, int rowGroupId,
        List<ColumnEncoding> columnEncodings, Map<StreamId, Stream> streams,
        Map<Integer, List<RowGroupIndex>> columnIndexes) throws InvalidCheckpointException {
    ImmutableSetMultimap.Builder<Integer, StreamKind> streamKindsBuilder = ImmutableSetMultimap.builder();
    for (Stream stream : streams.values()) {
        streamKindsBuilder.put(stream.getColumn(), stream.getStreamKind());
    }
    SetMultimap<Integer, StreamKind> streamKinds = streamKindsBuilder.build();

    ImmutableMap.Builder<StreamId, StreamCheckpoint> checkpoints = ImmutableMap.builder();
    for (int column : columns) {
        List<Integer> positionsList = columnIndexes.get(column).get(rowGroupId).getPositions();

        ColumnEncodingKind columnEncoding = columnEncodings.get(column).getColumnEncodingKind();
        OrcTypeKind columnType = columnTypes.get(column).getOrcTypeKind();
        Set<StreamKind> availableStreams = streamKinds.get(column);

        ColumnPositionsList columnPositionsList = new ColumnPositionsList(column, columnType, positionsList);
        switch (columnType) {
        case BOOLEAN:
            checkpoints.putAll(getBooleanColumnCheckpoints(column, compressionKind, availableStreams,
                    columnPositionsList));
            break;
        case BYTE:
            checkpoints.putAll(
                    getByteColumnCheckpoints(column, compressionKind, availableStreams, columnPositionsList));
            break;
        case SHORT:
        case INT:
        case LONG:
        case DATE:
            checkpoints.putAll(getLongColumnCheckpoints(column, columnEncoding, compressionKind,
                    availableStreams, columnPositionsList));
            break;
        case FLOAT:
            checkpoints.putAll(
                    getFloatColumnCheckpoints(column, compressionKind, availableStreams, columnPositionsList));
            break;
        case DOUBLE:
            checkpoints.putAll(
                    getDoubleColumnCheckpoints(column, compressionKind, availableStreams, columnPositionsList));
            break;
        case TIMESTAMP:
            checkpoints.putAll(getTimestampColumnCheckpoints(column, columnEncoding, compressionKind,
                    availableStreams, columnPositionsList));
            break;
        case BINARY:
        case STRING:
            checkpoints.putAll(getSliceColumnCheckpoints(column, columnEncoding, compressionKind,
                    availableStreams, columnPositionsList));
            break;
        case LIST:
        case MAP:
            checkpoints.putAll(getListOrMapColumnCheckpoints(column, columnEncoding, compressionKind,
                    availableStreams, columnPositionsList));
            break;
        case STRUCT:
            checkpoints.putAll(
                    getStructColumnCheckpoints(column, compressionKind, availableStreams, columnPositionsList));
            break;
        case DECIMAL:
        case CHAR:
        case VARCHAR:
        case UNION:
            throw new IllegalArgumentException("Unsupported column type " + columnType);
        }

        // The DWRF code is not meticulous in the handling of checkpoints.  It appears that for the first row group
        // it will write checkpoints for all streams, but in other cases it will write only the streams that exist.
        // We detect this case by checking that all offsets in the initial position list are zero, and if so, we
        // clear the extra offsets
        if (columnPositionsList.hasNextPosition() && !Iterables.all(positionsList, equalTo(0))) {
            throw new InvalidCheckpointException(format(
                    "Column %s, of type %s, contains %s offset positions, but only %s positions were consumed",
                    column, columnType, positionsList.size(), columnPositionsList.getIndex()));
        }
    }
    return checkpoints.build();
}

From source file:com.spotify.helios.agent.LogConfigContainerDecorator.java

@Override
public void decorateHostConfig(Job job, Optional<String> dockerVersion, HostConfig.Builder hostConfig) {
    final LoggingConfiguration logging = job.getLogging();

    if (logging != Job.EMPTY_LOGGING) {
        final ImmutableMap.Builder<String, String> logOpts = ImmutableMap.builder();
        logOpts.putAll(logging.getOptions());
        hostConfig.logConfig(LogConfig.create(job.getLogging().getDriver(), logOpts.build()));
    }
}

From source file:com.splicemachine.orc.checkpoint.Checkpoints.java

public static Map<StreamId, StreamCheckpoint> getStreamCheckpoints(Set<Integer> columns,
        List<OrcType> columnTypes, CompressionKind compressionKind, int rowGroupId,
        List<ColumnEncoding> columnEncodings, Map<StreamId, Stream> streams,
        Map<Integer, List<RowGroupIndex>> columnIndexes) throws InvalidCheckpointException {
    ImmutableSetMultimap.Builder<Integer, StreamKind> streamKindsBuilder = ImmutableSetMultimap.builder();
    for (Stream stream : streams.values()) {
        streamKindsBuilder.put(stream.getColumn(), stream.getStreamKind());
    }
    SetMultimap<Integer, StreamKind> streamKinds = streamKindsBuilder.build();

    ImmutableMap.Builder<StreamId, StreamCheckpoint> checkpoints = ImmutableMap.builder();
    for (int column : columns) {
        List<Integer> positionsList = columnIndexes.get(column).get(rowGroupId).getPositions();

        ColumnEncodingKind columnEncoding = columnEncodings.get(column).getColumnEncodingKind();
        OrcTypeKind columnType = columnTypes.get(column).getOrcTypeKind();
        Set<StreamKind> availableStreams = streamKinds.get(column);

        ColumnPositionsList columnPositionsList = new ColumnPositionsList(column, columnType, positionsList);
        switch (columnType) {
        case BOOLEAN:
            checkpoints.putAll(getBooleanColumnCheckpoints(column, compressionKind, availableStreams,
                    columnPositionsList));
            break;
        case BYTE:
            checkpoints.putAll(
                    getByteColumnCheckpoints(column, compressionKind, availableStreams, columnPositionsList));
            break;
        case SHORT:
        case INT:
        case LONG:
        case DATE:
            checkpoints.putAll(getLongColumnCheckpoints(column, columnEncoding, compressionKind,
                    availableStreams, columnPositionsList));
            break;
        case FLOAT:
            checkpoints.putAll(
                    getFloatColumnCheckpoints(column, compressionKind, availableStreams, columnPositionsList));
            break;
        case DOUBLE:
            checkpoints.putAll(
                    getDoubleColumnCheckpoints(column, compressionKind, availableStreams, columnPositionsList));
            break;
        case TIMESTAMP:
            checkpoints.putAll(getTimestampColumnCheckpoints(column, columnEncoding, compressionKind,
                    availableStreams, columnPositionsList));
            break;
        case BINARY:
        case STRING:
        case VARCHAR:
        case CHAR:
            checkpoints.putAll(getSliceColumnCheckpoints(column, columnEncoding, compressionKind,
                    availableStreams, columnPositionsList));
            break;
        case LIST:
        case MAP:
            checkpoints.putAll(getListOrMapColumnCheckpoints(column, columnEncoding, compressionKind,
                    availableStreams, columnPositionsList));
            break;
        case STRUCT:
            checkpoints.putAll(
                    getStructColumnCheckpoints(column, compressionKind, availableStreams, columnPositionsList));
            break;
        case DECIMAL:
            checkpoints.putAll(getDecimalColumnCheckpoints(column, columnEncoding, compressionKind,
                    availableStreams, columnPositionsList));
            break;
        case UNION:
            throw new IllegalArgumentException("Unsupported column type " + columnType);
        }

        // The DWRF code is not meticulous in the handling of checkpoints.  It appears that for the first row group
        // it will write checkpoints for all streams, but in other cases it will write only the streams that exist.
        // We detect this case by checking that all offsets in the initial position list are zero, and if so, we
        // clear the extra offsets
        if (columnPositionsList.hasNextPosition() && !Iterables.all(positionsList, equalTo(0))) {
            throw new InvalidCheckpointException(format(
                    "Column %s, of type %s, contains %s offset positions, but only %s positions were consumed",
                    column, columnType, positionsList.size(), columnPositionsList.getIndex()));
        }
    }
    return checkpoints.build();
}

From source file:com.google.devtools.build.lib.exec.local.WindowsLocalEnvProvider.java

/**
 * Compute an environment map for local actions on Windows.
 *
 * <p>Returns a map with the same keys and values as {@code env}. Overrides the value of TMP and
 * TEMP (or adds them if not present in {@code env}) by the same value, which is:
 *
 * <ul>
 *   <li>the value of {@code clientEnv.get("TMP")}, or if that's empty or null, then
 *   <li>the value of {@code clientEnv.get("TEMP")}, or if that's empty or null, then
 *   <li>the value of {@code fallbackTmpDir}.
 * </ul>
 *
 * <p>The values for TMP and TEMP will use backslashes as directory separators.
 */
@Override
public Map<String, String> rewriteLocalEnv(Map<String, String> env, Path execRoot, String fallbackTmpDir) {
    ImmutableMap.Builder<String, String> result = ImmutableMap.builder();
    result.putAll(Maps.filterKeys(env, k -> !k.equals("TMP") && !k.equals("TEMP")));
    String p = clientEnv.get("TMP");
    if (Strings.isNullOrEmpty(p)) {
        p = clientEnv.get("TEMP");
        if (Strings.isNullOrEmpty(p)) {
            p = fallbackTmpDir;
        }
    }
    p = p.replace('/', '\\');
    result.put("TMP", p);
    result.put("TEMP", p);
    return result.build();
}

From source file:org.graylog2.dashboards.widgets.StreamSearchResultCountWidget.java

@Override
public Map<String, Object> getPersistedConfig() {
    final Map<String, Object> inheritedConfig = super.getPersistedConfig();
    final ImmutableMap.Builder<String, Object> persistedConfig = ImmutableMap.builder();
    persistedConfig.putAll(inheritedConfig);
    if (!isNullOrEmpty(streamId)) {
        persistedConfig.put("stream_id", streamId);
    }

    return persistedConfig.build();
}