Example usage for com.google.common.collect Lists newArrayList

List of usage examples for com.google.common.collect Lists newArrayList

Introduction

On this page you can find example usages of com.google.common.collect.Lists#newArrayList.

Prototype

@GwtCompatible(serializable = true)
public static <E> ArrayList<E> newArrayList() 

Source Link

Document

Creates a mutable, empty ArrayList instance (for Java 6 and earlier).

Usage

From source file:mvm.rya.indexing.external.ExternalSailExample.java

public static void main(String[] args) throws Exception {
    // Demo: build an in-memory RDF store, register a precomputed SPARQL
    // "index" backed by Accumulo, and run a query through an ExternalSail
    // that can answer parts of it from that index.
    Sail store = new MemoryStore();
    SailRepository repository = new SailRepository(store);
    repository.initialize();
    SailRepositoryConnection connection = repository.getConnection();

    // First entity: a type triple, a label, and a talksTo edge.
    URI entity = new URIImpl("uri:entity");
    URI entityClass = new URIImpl("uri:class");
    URI object = new URIImpl("uri:obj");
    URI talksTo = new URIImpl("uri:talksTo");

    connection.add(entity, RDF.TYPE, entityClass);
    connection.add(entity, RDFS.LABEL, new LiteralImpl("label"));
    connection.add(entity, talksTo, object);

    // Second entity with its own class, label, and object.
    URI entity2 = new URIImpl("uri:entity2");
    URI entityClass2 = new URIImpl("uri:class2");
    URI object2 = new URIImpl("uri:obj2");

    connection.add(entity2, RDF.TYPE, entityClass2);
    connection.add(entity2, RDFS.LABEL, new LiteralImpl("label2"));
    connection.add(entity2, talksTo, object2);

    // Query whose results will be materialized into the external index.
    String indexSparqlString = "SELECT ?e ?l ?c "
            + "{"
            + "  ?e a ?c . "
            + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l "
            + "}";

    connection.prepareTupleQuery(QueryLanguage.SPARQL, indexSparqlString)
            .evaluate(new SPARQLResultsXMLWriter(System.out));

    SPARQLParser sparqlParser = new SPARQLParser();
    ParsedQuery parsedQuery = sparqlParser.parseQuery(indexSparqlString, null);
    System.out.println(parsedQuery);

    // Register the index query as an Accumulo-backed external tuple set.
    List<ExternalTupleSet> index = Lists.newArrayList();

    Connector accCon = new MockInstance().getConnector("root", "".getBytes());
    String tablename = "table";
    accCon.tableOperations().create(tablename);
    index.add(new AccumuloIndexSet(indexSparqlString, connection, accCon, tablename));

    // The user query; it subsumes the index query plus one extra pattern.
    String queryString = "SELECT ?e ?c ?l ?o "
            + "{"
            + "  ?e a ?c . "
            + "  ?e <http://www.w3.org/2000/01/rdf-schema#label> ?l . "
            + "  ?e <uri:talksTo> ?o . "
            + "}";

    connection.prepareTupleQuery(QueryLanguage.SPARQL, queryString).evaluate(new SPARQLResultsXMLWriter(System.out));

    parsedQuery = sparqlParser.parseQuery(queryString, null);
    QueryModelTreePrinter treePrinter = new QueryModelTreePrinter();
    parsedQuery.getTupleExpr().visit(treePrinter);
    System.out.println(treePrinter.getTreeString());
    System.out.println(parsedQuery.getTupleExpr());

    System.out.println("++++++++++++");
    ExternalProcessor processor = new ExternalProcessor(index);
    System.out.println(processor.process(parsedQuery.getTupleExpr()));

    System.out.println("----------------");
    // Wrap the base store in a sail that rewrites queries via the processor.
    Sail processingSail = new ExternalSail(store, processor);
    SailRepository smartSailRepo = new SailRepository(processingSail);
    smartSailRepo.initialize();

    smartSailRepo.getConnection().prepareTupleQuery(QueryLanguage.SPARQL, queryString)
            .evaluate(new SPARQLResultsXMLWriter(System.out));

}

From source file:com.netflix.servo.examples.ManyMetricsExample.java

public static void main(String[] args) throws Exception {
    // Registers <numMetrics> counters, each carrying <tagsPerMetric> tags,
    // then increments them forever while timing each pass and periodically
    // printing per-thread CPU stats.
    if (args.length != 2) {
        System.out.println("Usage: ManyMetricsExample <tagsPerMetric> <numMetrics>");
        System.exit(1);
    }
    // parseInt avoids the needless Integer boxing of Integer.valueOf.
    final int tagsPerMetric = Integer.parseInt(args[0]);
    final int numMetrics = Integer.parseInt(args[1]);

    final List<Counter> counters = Lists.newArrayList();
    for (int i = 0; i < numMetrics; ++i) {
        final Counter c = newCounter(tagsPerMetric, i);
        counters.add(c);
        DefaultMonitorRegistry.getInstance().register(c);
    }

    startPolling();

    final ThreadCpuStats stats = ThreadCpuStats.getInstance();
    stats.start();

    // Update the counters once in a while, timing each pass.
    final Timer t = Monitors.newTimer("updateCounts");
    DefaultMonitorRegistry.getInstance().register(t);
    final long delay = 500L;
    final int report = 120; // print CPU usage every `report` iterations
    int count = 0;
    while (true) {
        final Stopwatch s = t.start();
        try {
            for (Counter c : counters) {
                c.increment();
            }

            if (count % report == 0) {
                stats.printThreadCpuUsages();
            }

            Thread.sleep(delay);
            ++count;
        } catch (InterruptedException e) {
            // BUG FIX: the broad catch used to swallow the interrupt, leaving
            // the loop spinning on an interrupted thread. Restore the flag
            // and stop instead.
            Thread.currentThread().interrupt();
            return;
        } catch (Exception e) {
            // Keep going on other errors, as the original did.
            e.printStackTrace();
        } finally {
            s.stop();
        }
    }
}

From source file:org.eknet.swing.uploadfield.DemoForm.java

public static void main(String[] args) {
    // Builds the demo form, wires listeners that log value changes, shows the
    // form, and after a short delay populates the multi-upload field with
    // test icon URLs from a background thread.
    final DemoForm form = new DemoForm();
    form.fileInput.addPropertyChangeListener(new PropertyChangeListener() {
        @Override
        public void propertyChange(PropertyChangeEvent evt) {
            if (evt.getPropertyName().equals(UploadField.VALUE_PROPERTY_NAME)) {
                System.out.println("New Image: " + evt.getNewValue());
            }
        }
    });
    form.multiUploadField.addPropertyChangeListener(new PropertyChangeListener() {
        @Override
        public void propertyChange(PropertyChangeEvent evt) {
            if (evt.getPropertyName().equals(MultiUploadField.VALUE_PROPERTY_NAME)) {
                // Wildcard instead of a raw List: we only read size()/toString().
                List<?> list = (List<?>) evt.getNewValue();
                System.out.println("new images [ " + list.size() + " ]:" + list);
            }
        }
    });
    TestPanel.start(form, "Form Preview");
    new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                Thread.sleep(2500);
            } catch (InterruptedException e) {
                // BUG FIX: don't silently swallow the interrupt — restore the
                // flag and skip the delayed population.
                Thread.currentThread().interrupt();
                return;
            }
            List<URL> urls = IconViewerTest.getIconURLs();
            List<UploadValue> values = Lists.newArrayList();
            for (URL url : urls) {
                values.add(new DefaultUploadValue(url, "testname"));
            }
            System.out.println(">> setting values: " + values);
            form.multiUploadField.setUploadValueList(values);
        }
    }).start();
}

From source file:org.apache.mahout.cf.taste.example.netflix.TransposeToByUser.java

public static void main(String[] args) throws IOException, OptionException {
    // Transposes the Netflix "by item" training files into "by user" files:
    // reads each per-movie file and buckets its rating lines by user ID.

    File dataDirectory = TasteOptionParser.getRatings(args);
    File byItemDirectory = new File(dataDirectory, "training_set");
    File byUserDirectory = new File(dataDirectory, "training_set_by_user");

    Preconditions.checkArgument(dataDirectory.exists() && dataDirectory.isDirectory(), "%s is not a directory",
            dataDirectory);
    Preconditions.checkArgument(byItemDirectory.exists() && byItemDirectory.isDirectory(),
            "%s is not a directory", byItemDirectory);
    Preconditions.checkArgument(!byUserDirectory.exists(), "%s already exists", byUserDirectory);

    byUserDirectory.mkdirs();

    // userID -> pending output lines; flushed periodically by maybeFlushCache.
    Map<String, List<String>> byUserEntryCache = new FastMap<String, List<String>>(100000);

    // BUG FIX: File.listFiles() returns null on I/O error; fail fast with a
    // clear message instead of a NullPointerException in the for-loop.
    File[] byItemFiles = byItemDirectory.listFiles();
    Preconditions.checkNotNull(byItemFiles, "Unable to list files in %s", byItemDirectory);

    for (File byItemFile : byItemFiles) {
        log.info("Processing {}", byItemFile);
        Iterator<String> lineIterator = new FileLineIterable(byItemFile, false).iterator();
        if (!lineIterator.hasNext()) {
            // Skip empty files rather than throwing NoSuchElementException.
            continue;
        }
        // First line ends with a separator char ("<movieID>:" in the Netflix
        // format — TODO confirm); drop that trailing char.
        String line = lineIterator.next();
        String movieIDString = line.substring(0, line.length() - 1);
        while (lineIterator.hasNext()) {
            line = lineIterator.next();
            // Each line is "userID,rating,...": keep the leading comma on the
            // rating so the output is "movieID,rating".
            int firstComma = line.indexOf(',');
            String userIDString = line.substring(0, firstComma);
            int secondComma = line.indexOf(',', firstComma + 1);
            String ratingString = line.substring(firstComma, secondComma); // keep comma
            List<String> cachedLines = byUserEntryCache.get(userIDString);
            if (cachedLines == null) {
                cachedLines = Lists.newArrayList();
                byUserEntryCache.put(userIDString, cachedLines);
            }
            cachedLines.add(movieIDString + ratingString);
            maybeFlushCache(byUserDirectory, byUserEntryCache);
        }

    }

}

From source file:tl.lin.data.map.FrontCodedString2IntBidiMapBuilder.java

@SuppressWarnings({ "static-access" })
public static void main(String[] args) throws Exception {
    // Builds a bidirectional String<->int map from "term \t id" input lines:
    // sorts by term, then serializes (1) a front-coded string list,
    // (2) a monotone minimal-perfect-hash dictionary, and (3) the int values
    // into a single length-prefixed output file on the configured FileSystem.
    Options options = new Options();

    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("input path").create(INPUT));
    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("output path").create(OUTPUT));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();

    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (!cmdline.hasOption(INPUT) || !cmdline.hasOption(OUTPUT)) {
        System.out.println("args: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.setWidth(120);
        formatter.printHelp(FrontCodedString2IntBidiMapBuilder.class.getName(), options);
        ToolRunner.printGenericCommandUsage(System.out);
        System.exit(-1);
    }

    String input = cmdline.getOptionValue(INPUT);
    String output = cmdline.getOptionValue(OUTPUT);

    List<String> stringList = Lists.newArrayList();
    IntArrayList intList = new IntArrayList();

    // First read lines into a sorted map to sort the input by term.
    Object2IntAVLTreeMap<String> tree = new Object2IntAVLTreeMap<String>();
    BufferedReader br = new BufferedReader(new FileReader(input));
    String line;
    while ((line = br.readLine()) != null) {
        String[] arr = line.split("\\t");
        // BUG FIX: the original logged "Skipping invalid line" but then fell
        // through to tree.put anyway. Now really skip malformed lines, and
        // also guard against lines missing the tab-separated id field.
        if (arr.length < 2 || arr[0] == null || arr[0].length() == 0) {
            LOG.info("Skipping invalid line: " + line);
            continue;
        }
        tree.put(arr[0], Integer.parseInt(arr[1]));
    }
    br.close();

    // Extract sorted strings and ints.
    for (Object2IntMap.Entry<String> map : tree.object2IntEntrySet()) {
        stringList.add(map.getKey());
        intList.add(map.getIntValue());
    }

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);

    FSDataOutputStream os = fs.create(new Path(output), true);

    ByteArrayOutputStream bytesOut;
    ObjectOutputStream objOut;
    byte[] bytes;

    // Serialize the front-coded dictionary, prefixed with its byte length.
    FrontCodedStringList frontcodedList = new FrontCodedStringList(stringList, 8, true);

    bytesOut = new ByteArrayOutputStream();
    objOut = new ObjectOutputStream(bytesOut);
    objOut.writeObject(frontcodedList);
    objOut.close();

    bytes = bytesOut.toByteArray();
    os.writeInt(bytes.length);
    os.write(bytes);

    // Serialize the hash function (term -> position), also length-prefixed.
    ShiftAddXorSignedStringMap dict = new ShiftAddXorSignedStringMap(stringList.iterator(),
            new TwoStepsLcpMonotoneMinimalPerfectHashFunction<CharSequence>(stringList,
                    TransformationStrategies.prefixFreeUtf16()));

    bytesOut = new ByteArrayOutputStream();
    objOut = new ObjectOutputStream(bytesOut);
    objOut.writeObject(dict);
    objOut.close();

    bytes = bytesOut.toByteArray();
    os.writeInt(bytes.length);
    os.write(bytes);

    // Serialize the ints: count first, then each value in sorted-term order.
    os.writeInt(intList.size());
    for (int i = 0; i < intList.size(); i++) {
        os.writeInt(intList.getInt(i));
    }

    os.close();
}

From source file:pocman.demo.MazeDemo1.java

public static void main(final String[] args) throws InterruptedException {
    // Demo: parse a maze layout, locate coins, and for each reachable graph
    // node print the shortest path from the node nearest the first coin,
    // rendering the visited trail via debug()/debug2().

    // NOTE: the layout string below is whitespace-significant — every run of
    // spaces encodes maze tiles. Do not reformat it.
    final Maze maze = Maze.from("" + "                       "
            + "      "
            + "                         "
            + "      "
            + "                      "
            + "    "
            + "                   "
            + "    "
            + "                "
            + "                 "
            + "    "
            + "                  "
            + "    "
            + "                        "
            + "      "
            + "              "
            + "      "
            + "                        "
            + "");

    final MazeNode firstCoinNode = maze.find(Tile.COIN);
    //System.out.println(firstCoinNode);

    // Second coin: search resumes after firstCoinNode.
    final MazeNode nextCoinNode = maze.find(Tile.COIN, firstCoinNode);
    //System.out.println(nextCoinNode);

    final Map<MazeNode, Entry<Move, Integer>> graphNodeRange = maze.getGraphNodeRange(nextCoinNode);

    System.out.println(graphNodeRange);

    //System.exit(0);

    final MazeNode node1 = maze.getNearestGraphNode(firstCoinNode);
    //System.out.println(node1);

    //final MazeNode node2 = maze.getNearestGraphNode(nextCoinNode);
    //System.out.println(node2);

    PathInterface<MazeNode> shortestPath = null;
    for (final Entry<MazeNode, Entry<Move, Integer>> entry : graphNodeRange.entrySet()) {
        shortestPath = maze.getShortestPath(node1, entry.getKey());
        System.out.println(shortestPath);

        // Flatten the path's edges into a node trail: first endpoint of the
        // first edge, then both endpoints of each subsequent edge, then the
        // last endpoint of the final edge. NOTE(review): assumes the path has
        // at least one edge — edges.get(0) would throw otherwise.
        final List<WeightedEdge<MazeNode>> edges = shortestPath.getEdges();
        final List<MazeNode> trail = Lists.newArrayList();
        trail.add(edges.get(0).getEndPoint1());
        final int n = edges.size() - 1;
        for (int i = 1; i <= n; ++i) {
            trail.add(edges.get(i).getEndPoint1());
            trail.add(edges.get(i).getEndPoint2());
        }
        trail.add(edges.get(n).getEndPoint2());
        //System.out.println(trail);

        debug(maze, trail, 160);

        // Expand the trail with every endpoint connected to each trail node.
        final List<MazeNode> potential = Lists.newArrayList(trail);

        for (final MazeNode mazeNode : trail) {
            final Set<MazeNode> endPoints = maze.get().getConnectedEndPoints(mazeNode);
            //System.out.println(endPoints);
            potential.addAll(endPoints);
        }
        debug2(maze, potential);

    }

}

From source file:com.technobium.MultinomialLogisticRegression.java

public static void main(String[] args) throws Exception {
    // this test trains a 3-way classifier on the famous Iris dataset.
    // a similar exercise can be accomplished in R using this code:
    //    library(nnet)
    //    correct = rep(0,100)
    //    for (j in 1:100) {
    //      i = order(runif(150))
    //      train = iris[i[1:100],]
    //      test = iris[i[101:150],]
    //      m = multinom(Species ~ Sepal.Length + Sepal.Width + Petal.Length + Petal.Width, train)
    //      correct[j] = mean(predict(m, newdata=test) == test$Species)
    //    }
    //    hist(correct)
    //
    // Note that depending on the training/test split, performance can be better or worse.
    // There is about a 5% chance of getting accuracy < 90% and about 20% chance of getting accuracy
    // of 100%
    //
    // This test uses a deterministic split that is neither outstandingly good nor bad

    RandomUtils.useTestSeed();
    Splitter onComma = Splitter.on(",");

    // read the data (first line is the CSV header, skipped below)
    List<String> raw = Resources.readLines(Resources.getResource("iris.csv"), Charsets.UTF_8);

    // holds features
    List<Vector> data = Lists.newArrayList();

    // holds target variable
    List<Integer> target = Lists.newArrayList();

    // for decoding target values
    Dictionary dict = new Dictionary();

    // for permuting data later
    List<Integer> order = Lists.newArrayList();

    for (String line : raw.subList(1, raw.size())) {
        // order gets a list of indexes
        order.add(order.size());

        // parse the predictor variables; v[0] is the constant bias term
        Vector v = new DenseVector(5);
        v.set(0, 1);
        int i = 1;
        Iterable<String> values = onComma.split(line);
        for (String value : Iterables.limit(values, 4)) {
            v.set(i++, Double.parseDouble(value));
        }
        data.add(v);

        // and the target (5th field), interned to an integer class id
        target.add(dict.intern(Iterables.get(values, 4)));
    }

    // randomize the order ... original data has each species all together
    // note that this randomization is deterministic
    Random random = RandomUtils.getRandom();
    Collections.shuffle(order, random);

    // select training and test data
    List<Integer> train = order.subList(0, 100);
    List<Integer> test = order.subList(100, 150);
    logger.warn("Training set = {}", train);
    logger.warn("Test set = {}", test);

    // now train many times and collect information on accuracy each time.
    // correct[x] counts the runs that got exactly x test cases right, so it
    // needs test.size() + 1 slots (0..test.size() inclusive).
    int[] correct = new int[test.size() + 1];
    for (int run = 0; run < 200; run++) {
        OnlineLogisticRegression lr = new OnlineLogisticRegression(3, 5, new L2(1));
        // 30 training passes should converge to > 95% accuracy nearly always but never to 100%
        for (int pass = 0; pass < 30; pass++) {
            Collections.shuffle(train, random);
            for (int k : train) {
                lr.train(target.get(k), data.get(k));
            }
        }

        // check the accuracy on held out data
        int x = 0;
        for (Integer k : test) {
            Vector vt = lr.classifyFull(data.get(k));
            x += vt.maxValueIndex() == target.get(k) ? 1 : 0;
        }
        correct[x]++;

        if (run == 199) {
            // Final run: classify one hand-written sample and persist the model.
            Vector v = new DenseVector(5);
            v.set(0, 1);
            int i = 1;
            Iterable<String> values = onComma.split("6.0,2.7,5.1,1.6,versicolor");
            for (String value : Iterables.limit(values, 4)) {
                v.set(i++, Double.parseDouble(value));
            }

            lr.classifyFull(v);
            for (String value : dict.values()) {
                System.out.println("target:" + value);
            }
            // Intern the expected label (side effect: ensures it is in the dictionary).
            dict.intern(Iterables.get(values, 4));
            lr.close();

            Closer closer = Closer.create();

            try {
                FileOutputStream modelFileOut = closer
                        .register(new FileOutputStream(new File("model.txt")));
                DataOutputStream dataOutputStream = closer
                        .register(new DataOutputStream(modelFileOut));
                PolymorphicWritable.write(dataOutputStream, lr);
            } finally {
                closer.close();
            }
        }
    }

    // verify we never saw worse than 95% correct,
    for (int i = 0; i < Math.floor(0.95 * test.size()); i++) {
        if (correct[i] > 0) {
            System.out.println(String.format("%d trials had unacceptable accuracy of only %.0f%%: ", correct[i],
                    100.0 * i / test.size()));
        }
    }
    // nor perfect.
    // BUG FIX: 100% accuracy is x == test.size(), i.e. correct[test.size()];
    // the original read correct[test.size() - 1], the 98% bucket.
    System.out.println(String.format("%d trials had unrealistic accuracy of 100%%", correct[test.size()]));
}

From source file:org.apache.tajo.client.TajoDump.java

public static void main(String[] args) throws ParseException, IOException, ServiceException, SQLException {
    // Dumps the DDL of selected (or all) Tajo tables to stdout as a SQL script.
    TajoConf conf = new TajoConf();

    CommandLineParser cmdParser = new PosixParser();
    CommandLine cmd = cmdParser.parse(options, args);

    // Host/port explicitly given on the command line, if any.
    String hostName = cmd.hasOption("h") ? cmd.getOptionValue("h") : null;
    Integer port = cmd.hasOption("p") ? Integer.parseInt(cmd.getOptionValue("p")) : null;

    // Fall back to the configured master client address for whichever of
    // host/port was not supplied.
    if (hostName == null && conf.getVar(TajoConf.ConfVars.TAJO_MASTER_CLIENT_RPC_ADDRESS) != null) {
        hostName = conf.getVar(TajoConf.ConfVars.TAJO_MASTER_CLIENT_RPC_ADDRESS).split(":")[0];
    }
    if (port == null && conf.getVar(TajoConf.ConfVars.TAJO_MASTER_CLIENT_RPC_ADDRESS) != null) {
        port = Integer
                .parseInt(conf.getVar(TajoConf.ConfVars.TAJO_MASTER_CLIENT_RPC_ADDRESS).split(":")[1]);
    }

    TajoClient client = null;
    if ((hostName == null) != (port == null)) {
        // Exactly one of host/port is known: no usable server address.
        System.err.println("ERROR: cannot find valid Tajo server address");
        System.exit(-1);
    } else {
        // Either both are known (override the config) or neither is
        // (use the config as-is).
        if (hostName != null) {
            conf.setVar(TajoConf.ConfVars.TAJO_MASTER_CLIENT_RPC_ADDRESS, hostName + ":" + port);
        }
        client = new TajoClient(conf);
    }

    // Collect the tables to dump: all of them with -a, otherwise the
    // positional arguments; with neither, just print usage.
    List<TableDesc> tableDescList = Lists.newArrayList();
    if (cmd.hasOption("a")) {
        for (String tableName : client.getTableList()) {
            tableDescList.add(client.getTableDesc(tableName));
        }
    } else if (cmd.getArgs().length > 0) {
        for (String tableName : cmd.getArgs()) {
            tableDescList.add(client.getTableDesc(tableName));
        }
    } else {
        printUsage();
    }

    // Emit a header followed by each table's DDL.
    Writer writer = new PrintWriter(System.out);
    writer.write("--\n");
    writer.write("-- Tajo database dump\n");
    writer.write("-- Dump date: " + toDateString() + "\n");
    writer.write("--\n");
    writer.write("\n");
    for (TableDesc tableDesc : tableDescList) {
        writer.write(DDLBuilder.buildDDL(tableDesc));
        writer.write("\n\n");
    }
    writer.flush();
    writer.close();
    System.exit(0);
}

From source file:com.google.litecoin.examples.PrintPeers.java

public static void main(String[] args) throws Exception {
    // Collects peer addresses from IRC and DNS seeds, connects to each one,
    // and reports how its advertised chain height compares with the best
    // height seen so far.
    BriefLogFormatter.init();
    System.out.println("=== IRC ===");
    printIRC();
    System.out.println("=== DNS ===");
    printDNS();
    System.out.println("=== Version/chain heights ===");

    ArrayList<InetAddress> addrs = new ArrayList<InetAddress>();
    for (InetSocketAddress peer : dnsPeers)
        addrs.add(peer.getAddress());
    for (InetSocketAddress peer : ircPeers)
        addrs.add(peer.getAddress());
    System.out.println("Scanning " + addrs.size() + " peers:");

    final NetworkParameters params = NetworkParameters.prodNet();
    // bestHeight[0] holds the highest chain height reported so far; it is a
    // one-element array so the anonymous callbacks can mutate it, and all
    // access is guarded by `lock` since callbacks may run on other threads.
    final Object lock = new Object();
    final long[] bestHeight = new long[1];

    List<ListenableFuture<TCPNetworkConnection>> futures = Lists.newArrayList();
    for (final InetAddress addr : addrs) {
        final ListenableFuture<TCPNetworkConnection> future = TCPNetworkConnection.connectTo(params,
                new InetSocketAddress(addr, params.port), 1000 /* timeout */);
        futures.add(future);
        // Once the connection has completed version handshaking ...
        Futures.addCallback(future, new FutureCallback<TCPNetworkConnection>() {
            public void onSuccess(TCPNetworkConnection conn) {
                // Check the chain height it claims to have.
                VersionMessage ver = conn.getVersionMessage();
                long nodeHeight = ver.bestHeight;
                synchronized (lock) {
                    // diff > 0: node is behind our best; diff < 0: node is
                    // ahead, so adopt its height as the new best.
                    long diff = bestHeight[0] - nodeHeight;
                    if (diff > 0) {
                        System.out.println("Node is behind by " + diff + " blocks: " + addr);
                    } else if (diff == 0) {
                        // Same as the current best; the re-assignment is a no-op.
                        System.out.println("Node " + addr + " has " + nodeHeight + " blocks");
                        bestHeight[0] = nodeHeight;
                    } else if (diff < 0) {
                        System.out.println("Node is ahead by " + Math.abs(diff) + " blocks: " + addr);
                        bestHeight[0] = nodeHeight;
                    }
                }
                conn.close();
            }

            public void onFailure(Throwable throwable) {
                System.out.println("Failed to talk to " + addr + ": " + throwable.getMessage());
            }
        });
    }
    // Wait for every tried connection to finish.
    Futures.successfulAsList(futures).get();
}

From source file:org.apache.mahout.classifier.sequencelearning.hmm.ViterbiEvaluator.java

public static void main(String[] args) throws IOException {
    // CLI tool: loads a serialized HMM, reads an integer observation sequence
    // from the input file, decodes the most likely hidden-state sequence with
    // Viterbi, writes it to the output file, and optionally prints the
    // model likelihood of the observations.
    DefaultOptionBuilder optionBuilder = new DefaultOptionBuilder();
    ArgumentBuilder argumentBuilder = new ArgumentBuilder();

    Option inputOption = DefaultOptionCreator.inputOption().create();

    Option outputOption = DefaultOptionCreator.outputOption().create();

    Option modelOption = optionBuilder.withLongName("model").withDescription("Path to serialized HMM model")
            .withShortName("m")
            .withArgument(argumentBuilder.withMaximum(1).withMinimum(1).withName("path").create())
            .withRequired(true).create();

    Option likelihoodOption = optionBuilder.withLongName("likelihood")
            .withDescription("Compute likelihood of observed sequence").withShortName("l").withRequired(false)
            .create();

    Group optionGroup = new GroupBuilder().withOption(inputOption).withOption(outputOption)
            .withOption(modelOption).withOption(likelihoodOption).withName("Options").create();

    try {
        Parser parser = new Parser();
        parser.setGroup(optionGroup);
        CommandLine commandLine = parser.parse(args);

        String input = (String) commandLine.getValue(inputOption);
        String output = (String) commandLine.getValue(outputOption);

        String modelPath = (String) commandLine.getValue(modelOption);

        boolean computeLikelihood = commandLine.hasOption(likelihoodOption);

        //reading serialized HMM
        DataInputStream modelStream = new DataInputStream(new FileInputStream(modelPath));
        HmmModel model;
        try {
            model = LossyHmmSerializer.deserialize(modelStream);
        } finally {
            // second arg true: swallow any close() exception
            Closeables.close(modelStream, true);
        }

        //reading observations (whitespace-separated integers)
        List<Integer> observations = Lists.newArrayList();
        Scanner scanner = new Scanner(new FileInputStream(input), "UTF-8");
        try {
            while (scanner.hasNextInt()) {
                observations.add(scanner.nextInt());
            }
        } finally {
            scanner.close();
        }

        // unbox into the primitive array the decoder expects
        int[] observationsArray = new int[observations.size()];
        for (int i = 0; i < observations.size(); ++i) {
            observationsArray[i] = observations.get(i);
        }

        //decoding (true: use log-scaled computation)
        int[] hiddenStates = HmmEvaluator.decode(model, observationsArray, true);

        //writing output as space-separated state ids
        PrintWriter writer = new PrintWriter(
                new OutputStreamWriter(new FileOutputStream(output), Charsets.UTF_8), true);
        try {
            for (int hiddenState : hiddenStates) {
                writer.print(hiddenState);
                writer.print(' ');
            }
        } finally {
            // second arg false: propagate close() failures for the writer
            Closeables.close(writer, false);
        }

        if (computeLikelihood) {
            System.out.println("Likelihood: " + HmmEvaluator.modelLikelihood(model, observationsArray, true));
        }
    } catch (OptionException e) {
        // Bad command line: show usage instead of a stack trace.
        CommandLineUtil.printHelp(optionGroup);
    }
}