Example usage for com.google.common.collect ImmutableList.of

Introduction

On this page you can find example usage of com.google.common.collect ImmutableList.of, the static factory method that creates an immutable list from its arguments.

Prototype

public static <E> ImmutableList<E> of(E e1, E e2, E e3, E e4, E e5) 
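
Before the full examples, here is a minimal, self-contained sketch of this five-argument overload (not taken from any of the source files below; the class name ImmutableListOfExample is illustrative). It shows that of(...) returns a list that preserves argument order and rejects mutation:

import com.google.common.collect.ImmutableList;

import java.util.List;

public class ImmutableListOfExample {
    public static void main(String[] args) {
        // The five-argument overload returns an immutable list in argument order.
        List<String> colors = ImmutableList.of("red", "green", "blue", "cyan", "magenta");
        System.out.println(colors); // [red, green, blue, cyan, magenta]

        // Guava's immutable collections reject structural modification at runtime.
        try {
            colors.add("yellow");
        } catch (UnsupportedOperationException expected) {
            System.out.println("add() is unsupported, as expected");
        }
    }
}

Like all Guava immutable collections, ImmutableList also rejects null elements: passing null for any argument of of(...) throws a NullPointerException.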

Usage

From source file: com.feedzai.fos.samples.weka.WekaTraining.java

public static void main(String[] args) throws RemoteException, NotBoundException, FOSException {
    FOSManagerAdapter manager = FOSManagerAdapter.create("localhost", 5959);

    List<Attribute> attributes = ImmutableList.of(new NumericAttribute("sepalLength"),
            new NumericAttribute("sepalWidth"), new NumericAttribute("petalLength"),
            new NumericAttribute("petalWidth"), new CategoricalAttribute("class",
                    ImmutableList.of("Iris-setosa", "Iris-versicolor", "Iris-virginica")));

    Map<String, String> properties = ImmutableMap.of(WekaModelConfig.CLASS_INDEX, "4",
            WekaModelConfig.CLASSIFIER_IMPL, J48.class.getName());

    ModelConfig modelConfig = new ModelConfig(attributes, properties);

    File trainFile = new File("iris.data");

    UUID uuid = manager.trainAndAddFile(modelConfig, trainFile.getAbsolutePath());

    System.out.println("Trained model UUID = " + uuid);
}

From source file: fi.jyu.ties454.assignment3.group0.task3.Run.java

public static void main(String[] args) throws Exception {
    // now a clean map is loaded
    InputStream is = Run.class.getResourceAsStream("rectangleRoomLargeClean.txt");
    if (is == null) {
        System.err.println("Did you copy the resource folder as instructed?");
        System.exit(1);
    }
    Floor map = Floor.readFromReader(new InputStreamReader(is, StandardCharsets.US_ASCII));

    // currently starts 5 agents based on the same class. This is likely not
    // what you want. You can make 5 different classes and specialize as you
    // want.
    List<GameAgent> cleaners = ImmutableList.of(new MyCleaner(), new MyCleaner(), new MyCleaner(),
            new MyCleaner(), new MyCleaner());
    // more friends to play with
    List<GameAgent> dirtiers = ImmutableList.of(new MyDirtier(), new MyDirtier());

    // Create a game with the map and the cleaners. There are also
    // constructors which take more arguments. They will be used in later
    // exercises.
    Game g = new Game(map, cleaners, dirtiers);
    // Start the game. This will also show a 'graphical' representation
    // of the state of the rooms.
    // Each agent will start at a random location on the map.
    g.start();
}

From source file: org.apache.druid.benchmark.FloatCompressionBenchmarkFileGenerator.java

public static void main(String[] args) throws IOException {
    if (args.length >= 1) {
        dirPath = args[0];
    }

    BenchmarkColumnSchema enumeratedSchema = BenchmarkColumnSchema.makeEnumerated("", ValueType.FLOAT, true, 1,
            0d, ImmutableList.of(0f, 1.1f, 2.2f, 3.3f, 4.4f),
            ImmutableList.of(0.95, 0.001, 0.0189, 0.03, 0.0001));
    BenchmarkColumnSchema zipfLowSchema = BenchmarkColumnSchema.makeZipf("", ValueType.FLOAT, true, 1, 0d, -1,
            1000, 1d);
    BenchmarkColumnSchema zipfHighSchema = BenchmarkColumnSchema.makeZipf("", ValueType.FLOAT, true, 1, 0d, -1,
            1000, 3d);
    BenchmarkColumnSchema sequentialSchema = BenchmarkColumnSchema.makeSequential("", ValueType.FLOAT, true, 1,
            0d, 1470187671, 2000000000);
    BenchmarkColumnSchema uniformSchema = BenchmarkColumnSchema.makeContinuousUniform("", ValueType.FLOAT, true,
            1, 0d, 0, 1000);

    Map<String, BenchmarkColumnValueGenerator> generators = new HashMap<>();
    generators.put("enumerate", new BenchmarkColumnValueGenerator(enumeratedSchema, 1));
    generators.put("zipfLow", new BenchmarkColumnValueGenerator(zipfLowSchema, 1));
    generators.put("zipfHigh", new BenchmarkColumnValueGenerator(zipfHighSchema, 1));
    generators.put("sequential", new BenchmarkColumnValueGenerator(sequentialSchema, 1));
    generators.put("uniform", new BenchmarkColumnValueGenerator(uniformSchema, 1));

    File dir = new File(dirPath);
    dir.mkdir();

    // create data files using BenchmarkColumnValueGenerator
    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
        final File dataFile = new File(dir, entry.getKey());
        dataFile.delete();
        try (Writer writer = Files.newBufferedWriter(dataFile.toPath(), StandardCharsets.UTF_8)) {
            for (int i = 0; i < ROW_NUM; i++) {
                writer.write((Float) entry.getValue().generateRowValue() + "\n");
            }
        }
    }

    // create compressed files using all combinations of CompressionStrategy and FloatEncoding provided
    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
        for (CompressionStrategy compression : compressions) {
            String name = entry.getKey() + "-" + compression.toString();
            log.info("%s: ", name);
            File compFile = new File(dir, name);
            compFile.delete();
            File dataFile = new File(dir, entry.getKey());

            ColumnarFloatsSerializer writer = CompressionFactory.getFloatSerializer(
                    new OffHeapMemorySegmentWriteOutMedium(), "float", ByteOrder.nativeOrder(), compression);
            try (BufferedReader br = Files.newBufferedReader(dataFile.toPath(), StandardCharsets.UTF_8);
                    FileChannel output = FileChannel.open(compFile.toPath(), StandardOpenOption.CREATE_NEW,
                            StandardOpenOption.WRITE)) {
                writer.open();
                String line;
                while ((line = br.readLine()) != null) {
                    writer.add(Float.parseFloat(line));
                }
                writer.writeTo(output, null);
            }
            log.info("%d", compFile.length() / 1024);
        }
    }
}

From source file: org.apache.druid.benchmark.LongCompressionBenchmarkFileGenerator.java

public static void main(String[] args) throws IOException {
    if (args.length >= 1) {
        dirPath = args[0];
    }

    BenchmarkColumnSchema enumeratedSchema = BenchmarkColumnSchema.makeEnumerated("", ValueType.LONG, true, 1,
            0d, ImmutableList.of(0, 1, 2, 3, 4), ImmutableList.of(0.95, 0.001, 0.0189, 0.03, 0.0001));
    BenchmarkColumnSchema zipfLowSchema = BenchmarkColumnSchema.makeZipf("", ValueType.LONG, true, 1, 0d, -1,
            1000, 1d);
    BenchmarkColumnSchema zipfHighSchema = BenchmarkColumnSchema.makeZipf("", ValueType.LONG, true, 1, 0d, -1,
            1000, 3d);
    BenchmarkColumnSchema sequentialSchema = BenchmarkColumnSchema.makeSequential("", ValueType.LONG, true, 1,
            0d, 1470187671, 2000000000);
    BenchmarkColumnSchema uniformSchema = BenchmarkColumnSchema.makeDiscreteUniform("", ValueType.LONG, true, 1,
            0d, 0, 1000);

    Map<String, BenchmarkColumnValueGenerator> generators = new HashMap<>();
    generators.put("enumerate", new BenchmarkColumnValueGenerator(enumeratedSchema, 1));
    generators.put("zipfLow", new BenchmarkColumnValueGenerator(zipfLowSchema, 1));
    generators.put("zipfHigh", new BenchmarkColumnValueGenerator(zipfHighSchema, 1));
    generators.put("sequential", new BenchmarkColumnValueGenerator(sequentialSchema, 1));
    generators.put("uniform", new BenchmarkColumnValueGenerator(uniformSchema, 1));

    File dir = new File(dirPath);
    dir.mkdir();

    // create data files using BenchmarkColumnValueGenerator
    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
        final File dataFile = new File(dir, entry.getKey());
        dataFile.delete();
        try (Writer writer = Files.newBufferedWriter(dataFile.toPath(), StandardCharsets.UTF_8)) {
            for (int i = 0; i < ROW_NUM; i++) {
                writer.write((long) entry.getValue().generateRowValue() + "\n");
            }
        }
    }

    // create compressed files using all combinations of CompressionStrategy and LongEncoding provided
    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
        for (CompressionStrategy compression : compressions) {
            for (CompressionFactory.LongEncodingStrategy encoding : encodings) {
                String name = entry.getKey() + "-" + compression.toString() + "-" + encoding.toString();
                log.info("%s: ", name);
                File compFile = new File(dir, name);
                compFile.delete();
                File dataFile = new File(dir, entry.getKey());

                ColumnarLongsSerializer writer = CompressionFactory.getLongSerializer(
                        new OffHeapMemorySegmentWriteOutMedium(), "long", ByteOrder.nativeOrder(), encoding,
                        compression);
                try (BufferedReader br = Files.newBufferedReader(dataFile.toPath(), StandardCharsets.UTF_8);
                        FileChannel output = FileChannel.open(compFile.toPath(), StandardOpenOption.CREATE_NEW,
                                StandardOpenOption.WRITE)) {
                    writer.open();
                    String line;
                    while ((line = br.readLine()) != null) {
                        writer.add(Long.parseLong(line));
                    }
                    writer.writeTo(output, null);
                }
                log.info("%d", compFile.length() / 1024);
            }
        }
    }
}

From source file: io.druid.benchmark.FloatCompressionBenchmarkFileGenerator.java

public static void main(String[] args) throws IOException, URISyntaxException {
    if (args.length >= 1) {
        dirPath = args[0];
    }

    BenchmarkColumnSchema enumeratedSchema = BenchmarkColumnSchema.makeEnumerated("", ValueType.FLOAT, true, 1,
            0d, ImmutableList.<Object>of(0f, 1.1f, 2.2f, 3.3f, 4.4f),
            ImmutableList.of(0.95, 0.001, 0.0189, 0.03, 0.0001));
    BenchmarkColumnSchema zipfLowSchema = BenchmarkColumnSchema.makeZipf("", ValueType.FLOAT, true, 1, 0d, -1,
            1000, 1d);
    BenchmarkColumnSchema zipfHighSchema = BenchmarkColumnSchema.makeZipf("", ValueType.FLOAT, true, 1, 0d, -1,
            1000, 3d);
    BenchmarkColumnSchema sequentialSchema = BenchmarkColumnSchema.makeSequential("", ValueType.FLOAT, true, 1,
            0d, 1470187671, 2000000000);
    BenchmarkColumnSchema uniformSchema = BenchmarkColumnSchema.makeContinuousUniform("", ValueType.FLOAT, true,
            1, 0d, 0, 1000);

    Map<String, BenchmarkColumnValueGenerator> generators = new HashMap<>();
    generators.put("enumerate", new BenchmarkColumnValueGenerator(enumeratedSchema, 1));
    generators.put("zipfLow", new BenchmarkColumnValueGenerator(zipfLowSchema, 1));
    generators.put("zipfHigh", new BenchmarkColumnValueGenerator(zipfHighSchema, 1));
    generators.put("sequential", new BenchmarkColumnValueGenerator(sequentialSchema, 1));
    generators.put("uniform", new BenchmarkColumnValueGenerator(uniformSchema, 1));

    File dir = new File(dirPath);
    dir.mkdir();

    // create data files using BenchmarkColumnValueGenerator
    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
        final File dataFile = new File(dir, entry.getKey());
        dataFile.delete();
        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(dataFile)))) {
            for (int i = 0; i < ROW_NUM; i++) {
                writer.write((Float) entry.getValue().generateRowValue() + "\n");
            }
        }
    }

    // create compressed files using all combinations of CompressionStrategy and FloatEncoding provided
    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
        for (CompressedObjectStrategy.CompressionStrategy compression : compressions) {
            String name = entry.getKey() + "-" + compression.toString();
            System.out.print(name + ": ");
            File compFile = new File(dir, name);
            compFile.delete();
            File dataFile = new File(dir, entry.getKey());

            TmpFileIOPeon iopeon = new TmpFileIOPeon(true);
            FloatSupplierSerializer writer = CompressionFactory.getFloatSerializer(iopeon, "float",
                    ByteOrder.nativeOrder(), compression);
            BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(dataFile)));

            try (FileChannel output = FileChannel.open(compFile.toPath(), StandardOpenOption.CREATE_NEW,
                    StandardOpenOption.WRITE)) {
                writer.open();
                String line;
                while ((line = br.readLine()) != null) {
                    writer.add(Float.parseFloat(line));
                }
                final ByteArrayOutputStream baos = new ByteArrayOutputStream();
                writer.closeAndConsolidate(new ByteSink() {
                    @Override
                    public OutputStream openStream() throws IOException {
                        return baos;
                    }
                });
                output.write(ByteBuffer.wrap(baos.toByteArray()));
            } finally {
                iopeon.cleanup();
                br.close();
            }
            System.out.print(compFile.length() / 1024 + "\n");
        }
    }
}

From source file: io.druid.benchmark.LongCompressionBenchmarkFileGenerator.java

public static void main(String[] args) throws IOException, URISyntaxException {
    if (args.length >= 1) {
        dirPath = args[0];
    }

    BenchmarkColumnSchema enumeratedSchema = BenchmarkColumnSchema.makeEnumerated("", ValueType.LONG, true, 1,
            0d, ImmutableList.<Object>of(0, 1, 2, 3, 4), ImmutableList.of(0.95, 0.001, 0.0189, 0.03, 0.0001));
    BenchmarkColumnSchema zipfLowSchema = BenchmarkColumnSchema.makeZipf("", ValueType.LONG, true, 1, 0d, -1,
            1000, 1d);
    BenchmarkColumnSchema zipfHighSchema = BenchmarkColumnSchema.makeZipf("", ValueType.LONG, true, 1, 0d, -1,
            1000, 3d);
    BenchmarkColumnSchema sequentialSchema = BenchmarkColumnSchema.makeSequential("", ValueType.LONG, true, 1,
            0d, 1470187671, 2000000000);
    BenchmarkColumnSchema uniformSchema = BenchmarkColumnSchema.makeDiscreteUniform("", ValueType.LONG, true, 1,
            0d, 0, 1000);

    Map<String, BenchmarkColumnValueGenerator> generators = new HashMap<>();
    generators.put("enumerate", new BenchmarkColumnValueGenerator(enumeratedSchema, 1));
    generators.put("zipfLow", new BenchmarkColumnValueGenerator(zipfLowSchema, 1));
    generators.put("zipfHigh", new BenchmarkColumnValueGenerator(zipfHighSchema, 1));
    generators.put("sequential", new BenchmarkColumnValueGenerator(sequentialSchema, 1));
    generators.put("uniform", new BenchmarkColumnValueGenerator(uniformSchema, 1));

    File dir = new File(dirPath);
    dir.mkdir();

    // create data files using BenchmarkColumnValueGenerator
    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
        final File dataFile = new File(dir, entry.getKey());
        dataFile.delete();
        try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(dataFile)))) {
            for (int i = 0; i < ROW_NUM; i++) {
                writer.write((long) entry.getValue().generateRowValue() + "\n");
            }
        }
    }

    // create compressed files using all combinations of CompressionStrategy and LongEncoding provided
    for (Map.Entry<String, BenchmarkColumnValueGenerator> entry : generators.entrySet()) {
        for (CompressedObjectStrategy.CompressionStrategy compression : compressions) {
            for (CompressionFactory.LongEncodingStrategy encoding : encodings) {
                String name = entry.getKey() + "-" + compression.toString() + "-" + encoding.toString();
                System.out.print(name + ": ");
                File compFile = new File(dir, name);
                compFile.delete();
                File dataFile = new File(dir, entry.getKey());

                TmpFileIOPeon iopeon = new TmpFileIOPeon(true);
                LongSupplierSerializer writer = CompressionFactory.getLongSerializer(iopeon, "long",
                        ByteOrder.nativeOrder(), encoding, compression);
                BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(dataFile)));

                try (FileChannel output = FileChannel.open(compFile.toPath(), StandardOpenOption.CREATE_NEW,
                        StandardOpenOption.WRITE)) {
                    writer.open();
                    String line;
                    while ((line = br.readLine()) != null) {
                        writer.add(Long.parseLong(line));
                    }
                    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
                    writer.closeAndConsolidate(new ByteSink() {
                        @Override
                        public OutputStream openStream() throws IOException {
                            return baos;
                        }
                    });
                    output.write(ByteBuffer.wrap(baos.toByteArray()));
                } finally {
                    iopeon.cleanup();
                    br.close();
                }
                System.out.print(compFile.length() / 1024 + "\n");
            }
        }
    }
}

From source file: org.sonar.flex.toolkit.FlexToolkit.java

@VisibleForTesting
static List<Tokenizer> getTokenizers() {
    return ImmutableList.of(new StringTokenizer("<span class=\"s\">", "</span>"),
            new CDocTokenizer("<span class=\"cd\">", "</span>"),
            new JavadocTokenizer("<span class=\"cppd\">", "</span>"),
            new CppDocTokenizer("<span class=\"cppd\">", "</span>"),
            new KeywordsTokenizer("<span class=\"k\">", "</span>", FlexKeyword.keywordValues()));
}

From source file: org.sonar.javascript.toolkit.JavaScriptToolkit.java

public static List<Tokenizer> getTokenizers() {
    return ImmutableList.of(new StringTokenizer("<span class=\"s\">", "</span>"),
            new CDocTokenizer("<span class=\"cd\">", "</span>"),
            new JavadocTokenizer("<span class=\"cppd\">", "</span>"),
            new CppDocTokenizer("<span class=\"cppd\">", "</span>"),
            new KeywordsTokenizer("<span class=\"k\">", "</span>", EcmaScriptKeyword.keywordValues()));
}

From source file: org.haiku.haikudepotserver.support.cayenne.ExpressionHelper.java

/**
 * <p>This method will produce an expression that can be used in a Cayenne query.</p>
 */

public static Expression toExpression(VersionCoordinates coordinates, String prefix) {
    Preconditions.checkNotNull(coordinates);
    Expression majorE = ExpressionFactory.matchExp(prefixKey(prefix, PkgVersion.MAJOR.getName()),
            coordinates.getMajor());
    Expression minorE = ExpressionFactory.matchExp(prefixKey(prefix, PkgVersion.MINOR.getName()),
            coordinates.getMinor());
    Expression microE = ExpressionFactory.matchExp(prefixKey(prefix, PkgVersion.MICRO.getName()),
            coordinates.getMicro());
    Expression preReleaseE = ExpressionFactory.matchExp(prefixKey(prefix, PkgVersion.PRE_RELEASE.getName()),
            coordinates.getPreRelease());
    Expression revisionE = ExpressionFactory.matchExp(prefixKey(prefix, PkgVersion.REVISION.getName()),
            coordinates.getRevision());
    return andAll(ImmutableList.of(majorE, minorE, microE, preReleaseE, revisionE));
}

From source file: org.sonar.lua.toolkit.LuaToolkit.java

@VisibleForTesting
static List<Tokenizer> getTokenizers() {
    return ImmutableList.of(new StringTokenizer("<span class=\"s\">", "</span>"),
            new CDocTokenizer("<span class=\"cd\">", "</span>"),
            new JavadocTokenizer("<span class=\"cppd\">", "</span>"),
            new CppDocTokenizer("<span class=\"cppd\">", "</span>"),
            new KeywordsTokenizer("<span class=\"k\">", "</span>", LuaKeyword.keywordValues()));
}