Example usage for com.google.common.collect Lists asList

Introduction

This page collects example usages of com.google.common.collect.Lists.asList from open-source projects.

Prototype

public static <E> List<E> asList(@Nullable E first, E[] rest) 

Document

Returns an unmodifiable list containing the specified first element and backed by the specified array of additional elements.
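A minimal, self-contained sketch of that contract (not taken from any of the projects below; the class name and values are placeholders). The returned list has size 1 + rest.length, reflects later changes to the backing array, and rejects structural modification:

import com.google.common.collect.Lists;

import java.util.List;

public class ListsAsListDemo {
    public static void main(String[] args) {
        String[] rest = { "b", "c" };                 // backing array (placeholder values)
        List<String> list = Lists.asList("a", rest);  // view of ["a", "b", "c"], size 3

        System.out.println(list);                     // [a, b, c]

        rest[0] = "B";                                // changes to the array show through the view
        System.out.println(list.get(1));              // B

        try {
            list.add("d");                            // the list itself is unmodifiable
        } catch (UnsupportedOperationException expected) {
            System.out.println("add() is not supported");
        }
    }
}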

Usage

From source file: com.google.android.testing.nativedriver.client.AdbConnection.java

/**
 * Runs {@code adb} using the given arguments and under the configuration
 * values passed to the constructor.
 *
 * @param arguments the arguments to pass to the {@code adb} utility
 * @return a {@code Process} object representing the {@code adb} process
 */
protected Process runAdb(String... arguments) {
    List<String> commandLine = Lists.asList(adbPath, arguments);
    ProcessBuilder processBuilder = newProcessBuilder(commandLine);

    // If ports are initialized, add them to the environment.
    Map<String, String> environment = processBuilder.environment();

    if (adbServerPort != null) {
        environment.put("ANDROID_ADB_SERVER_PORT", adbServerPort.toString());
    }
    if (emulatorConsolePort != null) {
        environment.put("ANDROID_EMULATOR_CONSOLE_PORT", emulatorConsolePort.toString());
    }
    if (emulatorAdbPort != null) {
        environment.put("ANDROID_EMULATOR_ADB_PORT", emulatorAdbPort.toString());
    }

    try {
        return callProcessBuilderStart(processBuilder);
    } catch (IOException exception) {
        throw new AdbException("An IOException occurred when starting ADB.", exception);
    }
}

From source file: org.opennms.gizmo.k8s.GizmoK8sStacker.java

public URL getProxyUrl(String service, String... parts) {
    try {
        URL baseUrl = new URL(URLUtils.join(kubernetes.getMasterUrl().toString(), "api", "v1", "proxy",
                "namespaces", namespace, "services", service));
        return new URL(URLUtils.join(Lists.asList(baseUrl.toString(), parts).toArray(new String[0])));
    } catch (MalformedURLException e) {
        throw Throwables.propagate(e);
    }
}

From source file: com.youtube.serializer.YoutubeActivityUtil.java

/**
 * Formats the ID to conform with the Apache Streams activity ID convention
 * @param idparts the parts of the ID to join
 * @return a valid Activity ID in format "id:youtube:part1:part2:...partN"
 */
public static String formatId(String... idparts) {
    return Joiner.on(":").join(Lists.asList("id:youtube", idparts));
}
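For reference, a hypothetical call (the argument values below are placeholders, not from the original project) produces an ID in the documented format:

// Hypothetical invocation of the method above.
String id = YoutubeActivityUtil.formatId("channelId", "videoId");
// id is "id:youtube:channelId:videoId"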

From source file: com.google.api.codegen.gapic.GapicTestBase2.java

/**
 * Creates the constructor arguments to be passed to this class (GapicTestBase2) to create test
 * methods. The language String is passed to GapicGeneratorFactory to get the GapicGenerators
 * provided by that language; the snippet file names are then scraped from those generators, and
 * a set of arguments is created for each combination of CodeGenerator x snippet that
 * GapicGeneratorFactory returns.
 */
public static Object[] createTestConfig(TargetLanguage language, String[] gapicConfigFileNames,
        String packageConfigFileName, String apiName, String protoPackage, String clientPackage,
        String... baseNames) {
    Model model = Model.create(Service.getDefaultInstance());
    GapicProductConfig productConfig = GapicProductConfig.createDummyInstance();
    PackageMetadataConfig packageConfig = PackageMetadataConfig.createDummyPackageMetadataConfig();
    ArtifactFlags artifactFlags = new ArtifactFlags(Arrays.asList("surface", "test", "samples"),
            ArtifactType.LEGACY_GAPIC_AND_PACKAGE);

    List<CodeGenerator<?>> generators = GapicGeneratorFactory.create(language, model, productConfig,
            packageConfig, artifactFlags, true);

    List<String> snippetNames = new ArrayList<>();
    for (CodeGenerator<?> generator : generators) {
        snippetNames.addAll(generator.getInputFileNames());
    }

    StringBuilder gapic_config_missing = new StringBuilder();
    if (gapicConfigFileNames == null || gapicConfigFileNames.length == 0) {
        gapic_config_missing.append("_no_gapic_config");
    }

    String baseline = language.toString().toLowerCase() + "_" + apiName + gapic_config_missing + ".baseline";
    // Prepend apiName to the baseline names, then convert the view back to an array.
    baseNames = Lists.asList(apiName, baseNames).toArray(new String[0]);

    return new Object[] { language, gapicConfigFileNames, packageConfigFileName, snippetNames, baseline,
            protoPackage, clientPackage, baseNames };
}

From source file: org.sonar.db.debt.CharacteristicDao.java

public void insert(DbSession session, CharacteristicDto item, CharacteristicDto... others) {
    insert(session, Lists.asList(item, others));
}
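This one-liner is the canonical use of Lists.asList: adapting a (first, rest...) varargs signature to a List-taking overload without copying the varargs array. A hedged sketch of the same idiom (the Item and ItemRepository names are invented for illustration and are not part of CharacteristicDao):

import com.google.common.collect.Lists;

import java.util.List;

class Item {
}

class ItemRepository {
    // Varargs convenience overload: wraps the arguments as a List view, no array copy.
    void save(Item first, Item... others) {
        save(Lists.asList(first, others));
    }

    // List-based implementation that does the actual work.
    void save(List<Item> items) {
        for (Item item : items) {
            System.out.println("saving " + item);
        }
    }
}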

From source file: com.google.jimfs.PathService.java

/**
 * Parses the given strings as a path.
 */
public JimfsPath parsePath(String first, String... more) {
    String joined = type.joiner().join(Iterables.filter(Lists.asList(first, more), NOT_EMPTY));
    return toPath(type.parsePath(joined));
}

From source file: org.apache.druid.query.topn.TopNQueryBuilder.java

public TopNQueryBuilder filters(String dimensionName, String value, String... values) {
    dimFilter = new InDimFilter(dimensionName, Lists.asList(value, values), null);
    return this;
}

From source file: org.sonar.sslr.grammar.GrammarBuilder.java

/**
 * Creates parsing expression - "one or more".
 * Convenience method equivalent to calling {@code oneOrMore(sequence(e1, rest))}.
 *
 * @param e1  first sub-expression
 * @param rest  rest of sub-expressions
 * @throws IllegalArgumentException if any of the given arguments is not a parsing expression
 * @see #oneOrMore(Object)
 * @see #sequence(Object, Object)
 */
public final Object oneOrMore(Object e1, Object... rest) {
    return new OneOrMoreExpression(new SequenceExpression(convertToExpressions(Lists.asList(e1, rest))));
}

From source file: org.apache.brooklyn.util.http.HttpAsserts.java

public static void assertContentContainsText(final String url, final String phrase,
        final String... additionalPhrases) {
    try {
        String contents = HttpTool.getContent(url);
        Asserts.assertTrue(contents != null && contents.length() > 0);
        for (String text : Lists.asList(phrase, additionalPhrases)) {
            if (!contents.contains(text)) {
                LOG.warn("CONTENTS OF URL " + url + " MISSING TEXT: " + text + "\n" + contents);
                Asserts.fail("URL " + url + " does not contain text: " + text);
            }
        }
    } catch (Exception e) {
        throw propagateAsAssertionError(e);
    }
}

From source file: ml.shifu.shifu.container.obj.ModelConfig.java

/**
 * Create init ModelConfig.json
 * 
 * @param modelName
 *            name of model dataset
 * @param alg
 *            algorithm used; for LR/NN/RF/GBT, different init parameters will be set
 * @param description
 *            data set description
 * @param enableHadoop
 *            whether to run in distributed Hadoop cluster mode
 * @return ModelConfig instance
 * @throws IOException
 *             if any exception in column configuration file creation
 */
public static ModelConfig createInitModelConfig(String modelName, ALGORITHM alg, String description,
        boolean enableHadoop) throws IOException {
    ModelConfig modelConfig = new ModelConfig();

    DateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    // build meta info
    ModelBasicConf basic = new ModelBasicConf();
    basic.setName(modelName);

    basic.setAuthor(Environment.getProperty(Environment.SYSTEM_USER));
    basic.setRunMode(enableHadoop ? RunMode.DIST : RunMode.LOCAL);
    basic.setDescription("Created at " + df.format(new Date()));
    modelConfig.setBasic(basic);

    // build data set info
    ModelSourceDataConf dataSet = new ModelSourceDataConf();
    dataSet.setDataDelimiter("|");

    String exampleLocalDSPath = new File(Environment.getProperty(Environment.SHIFU_HOME),
            File.separator + "example" + File.separator + "cancer-judgement" + File.separator + "DataStore"
                    + File.separator + "DataSet1").toString();
    if (enableHadoop) {
        Path dst = new Path(File.separator + "user" + File.separator
                + Environment.getProperty(Environment.SYSTEM_USER) + File.separator + "cancer-judgement");
        if (!ShifuFileUtils.isFileExists(dst, SourceType.HDFS)) {
            HDFSUtils.getFS().mkdirs(dst);
            HDFSUtils.getFS().copyFromLocalFile(new Path(exampleLocalDSPath), dst);
        }
        dataSet.setSource(SourceType.HDFS);
        dataSet.setDataPath(new File(
                File.separator + "user" + File.separator + Environment.getProperty(Environment.SYSTEM_USER)
                        + File.separator + "cancer-judgement" + File.separator + "DataSet1").toString());
        dataSet.setHeaderPath(new File(File.separator + "user" + File.separator
                + Environment.getProperty(Environment.SYSTEM_USER) + File.separator + "cancer-judgement"
                + File.separator + "DataSet1" + File.separator + ".pig_header").toString());
    } else {
        dataSet.setSource(SourceType.LOCAL);
        dataSet.setDataPath(exampleLocalDSPath);
        dataSet.setHeaderPath(exampleLocalDSPath + File.separator + ".pig_header");
    }

    dataSet.setTargetColumnName("diagnosis");

    List<String> posTags = new ArrayList<String>();
    posTags.add("M");
    List<String> negTags = new ArrayList<String>();
    negTags.add("B");

    dataSet.setPosTags(posTags);
    dataSet.setNegTags(negTags);

    dataSet.setMissingOrInvalidValues(Lists.asList("", new String[] { "*", "#", "?", "null", "~" }));
    // create empty <ModelName>/meta.column.names
    ShifuFileUtils.createFileIfNotExists(new Path(modelName,
            Constants.COLUMN_META_FOLDER_NAME + File.separator + Constants.DEFAULT_META_COLUMN_FILE).toString(),
            SourceType.LOCAL);
    dataSet.setMetaColumnNameFile(
            Constants.COLUMN_META_FOLDER_NAME + File.separator + Constants.DEFAULT_META_COLUMN_FILE);
    // create empty <ModelName>/categorical.column.names
    ShifuFileUtils.createFileIfNotExists(new Path(modelName,
            Constants.COLUMN_META_FOLDER_NAME + File.separator + Constants.DEFAULT_CATEGORICAL_COLUMN_FILE)
                    .toString(),
            SourceType.LOCAL);
    dataSet.setCategoricalColumnNameFile(
            Constants.COLUMN_META_FOLDER_NAME + File.separator + Constants.DEFAULT_CATEGORICAL_COLUMN_FILE);
    modelConfig.setDataSet(dataSet);

    // build stats info
    modelConfig.setStats(new ModelStatsConf());
    modelConfig.setBinningAlgorithm(BinningAlgorithm.SPDTI);

    // build normalize info
    modelConfig.setNormalize(new ModelNormalizeConf());

    // build varselect info
    ModelVarSelectConf varselect = new ModelVarSelectConf();
    // create empty <ModelName>/forceselect.column.names
    ShifuFileUtils.createFileIfNotExists(new Path(modelName,
            Constants.COLUMN_META_FOLDER_NAME + File.separator + Constants.DEFAULT_FORCESELECT_COLUMN_FILE)
                    .toString(),
            SourceType.LOCAL);
    varselect.setForceSelectColumnNameFile(
            Constants.COLUMN_META_FOLDER_NAME + File.separator + Constants.DEFAULT_FORCESELECT_COLUMN_FILE);

    // create empty <ModelName>/forceremove.column.names
    ShifuFileUtils.createFileIfNotExists(new Path(modelName,
            Constants.COLUMN_META_FOLDER_NAME + File.separator + Constants.DEFAULT_FORCEREMOVE_COLUMN_FILE)
                    .toString(),
            SourceType.LOCAL);
    varselect.setForceRemoveColumnNameFile(
            Constants.COLUMN_META_FOLDER_NAME + File.separator + Constants.DEFAULT_FORCEREMOVE_COLUMN_FILE);
    varselect.setFilterEnable(Boolean.TRUE);
    varselect.setFilterNum(200);
    modelConfig.setVarSelect(varselect);

    // build train info
    ModelTrainConf trainConf = new ModelTrainConf();

    trainConf.setAlgorithm(alg.name());
    trainConf.setEpochsPerIteration(1);
    trainConf.setParams(ModelTrainConf.createParamsByAlg(alg, trainConf));
    trainConf.setNumTrainEpochs(100);
    if (ALGORITHM.NN.equals(alg)) {
        trainConf.setNumTrainEpochs(200);
    } else if (ALGORITHM.SVM.equals(alg)) {
        trainConf.setNumTrainEpochs(100);
    } else if (ALGORITHM.RF.equals(alg)) {
        trainConf.setNumTrainEpochs(20000);
    } else if (ALGORITHM.GBT.equals(alg)) {
        trainConf.setNumTrainEpochs(20000);
    } else if (ALGORITHM.LR.equals(alg)) {
        trainConf.setNumTrainEpochs(100);
    } else if (ALGORITHM.TENSORFLOW.equals(alg)) {
        trainConf.setNumTrainEpochs(100);
    }
    trainConf.setBaggingWithReplacement(false);
    modelConfig.setTrain(trainConf);

    EvalConfig evalConfig = new EvalConfig();
    evalConfig.setName("Eval1");
    RawSourceData evalSet = modelConfig.getDataSet().cloneRawSourceData();
    evalSet.setDataDelimiter("|");
    String exampleLocalESFolder = new File(Environment.getProperty(Environment.SHIFU_HOME),
            File.separator + "example" + File.separator + "cancer-judgement" + File.separator + "DataStore"
                    + File.separator + "EvalSet1").toString();
    if (enableHadoop) {
        evalSet.setSource(SourceType.HDFS);
        Path dst = new Path(File.separator + "user" + File.separator
                + Environment.getProperty(Environment.SYSTEM_USER) + File.separator + "cancer-judgement");
        if (!ShifuFileUtils.isFileExists(dst, SourceType.HDFS)) {
            HDFSUtils.getFS().copyFromLocalFile(new Path(exampleLocalESFolder), dst);
        }

        evalSet.setDataPath(new File(
                File.separator + "user" + File.separator + Environment.getProperty(Environment.SYSTEM_USER)
                        + File.separator + "cancer-judgement" + File.separator + "EvalSet1").toString());
        evalSet.setHeaderPath(new File(File.separator + "user" + File.separator
                + Environment.getProperty(Environment.SYSTEM_USER) + File.separator + "cancer-judgement"
                + File.separator + "EvalSet1" + File.separator + ".pig_header").toString());
    } else {
        evalSet.setSource(SourceType.LOCAL);
        evalSet.setDataPath(exampleLocalESFolder);
        evalSet.setHeaderPath(exampleLocalESFolder + File.separator + ".pig_header");
    }
    // create empty <ModelName>/<EvalSetName>.meta.column.names
    String namesFilePath = Constants.COLUMN_META_FOLDER_NAME + File.separator + evalConfig.getName() + "."
            + Constants.DEFAULT_META_COLUMN_FILE;
    ShifuFileUtils.createFileIfNotExists(new Path(modelName, namesFilePath).toString(), SourceType.LOCAL);
    evalSet.setMetaColumnNameFile(namesFilePath);
    evalConfig.setDataSet(evalSet);

    // create empty <ModelName>/<EvalSetName>Score.meta.column.names
    namesFilePath = Constants.COLUMN_META_FOLDER_NAME + File.separator + evalConfig.getName()
            + Constants.DEFAULT_CHAMPIONSCORE_META_COLUMN_FILE;
    ShifuFileUtils.createFileIfNotExists(new Path(modelName, namesFilePath).toString(), SourceType.LOCAL);
    evalConfig.setScoreMetaColumnNameFile(namesFilePath);

    modelConfig.getEvals().add(evalConfig);
    return modelConfig;
}