List of usage examples for com.google.common.collect.Iterators.transform
public static <F, T> Iterator<T> transform(final Iterator<F> fromIterator, final Function<? super F, ? extends T> function)
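Before the project examples below, a minimal self-contained sketch of the method itself (the names here are illustrative, not from any of the projects): transform returns a lazy view over the source iterator, applying the function to each element only as the caller advances.

import com.google.common.collect.Iterators;

import java.util.Arrays;
import java.util.Iterator;

public class TransformDemo {
    public static void main(String[] args) {
        Iterator<String> names = Arrays.asList("alpha", "beta", "gamma").iterator();
        // Map each name to its length; the function runs lazily, once per next() call.
        Iterator<Integer> lengths = Iterators.transform(names, String::length);
        while (lengths.hasNext()) {
            System.out.println(lengths.next()); // prints 5, 4, 5
        }
    }
}

Because the result is a view, nothing is copied up front, which is why the method composes well with database cursors, directory streams, and message queues, as the examples below show.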
From source file:com.bah.culvert.tableadapters.HBaseTableAdapter.java
@Override
public SeekingCurrentIterator get(Get get) {
    CRange range = get.getRange();
    byte[] start = range.getStart();
    byte[] end = range.getEnd();
    // if we are just getting a single row
    if (Bytes.compareTo(start, end) == 0) {
        // if it is not inclusive, we have no results
        if (!range.isEndInclusive() || !range.isStartInclusive())
            return new DecoratingCurrentIterator(new ArrayList<Result>(0).iterator());
        // setup the get
        org.apache.hadoop.hbase.client.Get hGet = new org.apache.hadoop.hbase.client.Get(start);
        // add the columns to get
        for (CColumn column : get.getColumns()) {
            if (column.getColumnQualifier().length == 0) {
                // XXX hack to make sure that we don't get from an empty column
                if (column.getColumnFamily().length == 0)
                    hGet.addFamily(DEFAULT_COLUMN);
                else
                    hGet.addFamily(column.getColumnFamily());
            } else {
                // XXX hack to make sure that we don't get from an empty column
                if (column.getColumnFamily().length == 0)
                    hGet.addColumn(DEFAULT_COLUMN, column.getColumnQualifier());
                else
                    hGet.addColumn(column.getColumnFamily(), column.getColumnQualifier());
            }
        }
        // do the get
        try {
            org.apache.hadoop.hbase.client.Result r = this.table.get(hGet);
            Iterator<Result> results = Iterators.transform(Collections.singletonList(r).iterator(),
                    RESULT_CONVERTER);
            return new DecoratingCurrentIterator(results);
        } catch (IOException e) {
            throw new RuntimeException("Failed to get from HBase", e);
        }
    }
    // make sure that we get the right ranges
    // if we don't include the start
    if (!range.isStartInclusive())
        start = Bytes.increment(start);
    // if we do include the end
    if (range.isEndInclusive())
        end = Bytes.increment(end);
    // create a scanner [start, end)
    Scan scan = new Scan(start, end);
    // add the columns to scan
    for (CColumn column : get.getColumns()) {
        if (column.getColumnQualifier().length == 0)
            scan.addFamily(column.getColumnFamily());
        else
            scan.addColumn(column.getColumnFamily(), column.getColumnQualifier());
    }
    // do the scan
    try {
        ResultScanner scanner = this.table.getScanner(scan);
        return new DecoratingCurrentIterator(Iterators.transform(scanner.iterator(), RESULT_CONVERTER));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
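The adapter above hands Iterators.transform a RESULT_CONVERTER constant that is defined elsewhere in HBaseTableAdapter and not shown here. Purely as a hypothetical illustration of the shape such a field must have (the real implementation may differ), it is a Function from the HBase client Result to Culvert's own Result type:

// Hypothetical sketch only; the actual RESULT_CONVERTER in HBaseTableAdapter
// may be implemented differently. It must map HBase rows to Culvert Results.
static final Function<org.apache.hadoop.hbase.client.Result, Result> RESULT_CONVERTER =
        new Function<org.apache.hadoop.hbase.client.Result, Result>() {
            @Override
            public Result apply(org.apache.hadoop.hbase.client.Result hbaseRow) {
                return toCulvertResult(hbaseRow); // toCulvertResult is a hypothetical helper
            }
        };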
From source file:org.locationtech.geogig.storage.mongo.MongoGraphDatabase.java
@Override
public ImmutableList<ObjectId> getChildren(ObjectId id) {
    DBObject query = new BasicDBObject();
    query.put("_label", Relationship.PARENT.name());
    query.put("_out", id.toString());
    DBCursor cursor = collection.find(query);
    Function<DBObject, ObjectId> idMapper = new Function<DBObject, ObjectId>() {
        @Override
        public ObjectId apply(DBObject o) {
            return ObjectId.valueOf((String) o.get("_in"));
        }
    };
    return ImmutableList.copyOf(Iterators.transform(cursor.iterator(), idMapper));
}
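Since com.google.common.base.Function has a single abstract method, the anonymous idMapper above can be written as a lambda on Java 8+, with no change in behavior:

// Lambda form of the idMapper above (Java 8+).
return ImmutableList.copyOf(
        Iterators.transform(cursor.iterator(), o -> ObjectId.valueOf((String) o.get("_in"))));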
From source file:org.locationtech.geogig.plumbing.diff.LCSGeometryDiffImpl.java
private String geomToStringOfCoordinates(@Nullable Geometry geom) {
    if (null == geom) {
        return "";
    }
    final Function<Coordinate, String> printCoords = (c) -> Double.toString(c.x) + ","
            + Double.toString(c.y);
    StringBuilder sb = new StringBuilder();
    sb.append(geom.getGeometryType() + " ");
    int n = geom.getNumGeometries();
    for (int i = 0; i < n; i++) {
        Geometry subgeom = geom.getGeometryN(i);
        if (subgeom instanceof Polygon) {
            Polygon polyg = (Polygon) subgeom;
            Coordinate[] coords = polyg.getExteriorRing().getCoordinates();
            Iterator<String> iter = Iterators.transform(Iterators.forArray(coords), printCoords);
            sb.append(Joiner.on(' ').join(iter));
            for (int j = 0; j < polyg.getNumInteriorRing(); j++) {
                coords = polyg.getInteriorRingN(j).getCoordinates();
                iter = Iterators.transform(Iterators.forArray(coords), printCoords);
                sb.append(" " + INNER_RING_SEPARATOR + " ");
                sb.append(Joiner.on(' ').join(iter));
            }
            if (i < n - 1) {
                sb.append(" " + SUBGEOM_SEPARATOR + " ");
            }
        } else {
            Coordinate[] coords = subgeom.getCoordinates();
            Iterator<String> iter = Iterators.transform(Iterators.forArray(coords), printCoords);
            sb.append(Joiner.on(' ').join(iter));
            sb.append(" " + SUBGEOM_SEPARATOR + " ");
        }
    }
    String s = sb.toString().trim();
    return s;
}
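The transform-then-join step this method repeats three times can be read as a single pattern: map each Coordinate to text, then let Joiner consume the resulting iterator directly.

// The recurring pattern from the method above, distilled to one expression.
String ring = Joiner.on(' ').join(Iterators.transform(Iterators.forArray(coords), printCoords));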
From source file:no.ssb.jsonstat.v2.support.DatasetTableView.java
@Override
public Map<List<String>, Number> row(List<String> rowKey) {
    return new AbstractMap<List<String>, Number>() {
        @Override
        public Set<Entry<List<String>, Number>> entrySet() {
            return new AbstractSet<Entry<List<String>, Number>>() {
                @Override
                public Iterator<Entry<List<String>, Number>> iterator() {
                    return Iterators.transform(DatasetTableView.this.columnKeySet().iterator(),
                            columnKey -> new SimpleEntry<>(columnKey,
                                    DatasetTableView.this.get(rowKey, columnKey)));
                }

                @Override
                public int size() {
                    return DatasetTableView.this.columnKeySet().size();
                }
            };
        }
    };
}
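The entry set above works because Iterators.transform is lazy: the SimpleEntry objects are created only as the iterator is consumed, so each traversal reflects the table's current contents. A small standalone sketch of this laziness (names are illustrative):

List<String> backing = Arrays.asList("a", "bb", "ccc");
Iterator<Integer> lengths = Iterators.transform(backing.iterator(), s -> {
    System.out.println("transforming " + s); // runs on next(), not on transform()
    return s.length();
});
// Nothing has printed yet; the function fires only as elements are consumed.
System.out.println(lengths.next()); // prints "transforming a", then 1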
From source file:qdg.StaticMixedIdGraph.java
@Override
public Iterator<Edge> getUEdgeIterator() {
    return Iterators.transform(uEdgeLace.getArcIterator(), constructUEdge);
}
From source file:com.google.devtools.kythe.platform.indexpack.Archive.java
/** Returns an {@link Iterator} of the units stored in the archive with a given format key. */
public <T> Iterator<T> readUnits(final String formatKey, final Class<T> cls) throws IOException {
    Preconditions.checkNotNull(formatKey);
    return Iterators.filter(
            Iterators.transform(Files.newDirectoryStream(unitDir, "*" + UNIT_SUFFIX).iterator(),
                    new Function<Path, T>() {
                        @Override
                        public T apply(Path path) {
                            try {
                                String name = path.getFileName().toString();
                                if (!name.endsWith(UNIT_SUFFIX)) {
                                    throw new IllegalStateException(
                                            "Received path without unit suffix: " + path);
                                }
                                String key = name.substring(0, name.length() - UNIT_SUFFIX.length());
                                return readUnit(key, formatKey, cls);
                            } catch (IOException ioe) {
                                throw Throwables.propagate(ioe);
                            }
                        }
                    }),
            new Predicate<T>() {
                @Override
                public boolean apply(T unit) {
                    return unit != null;
                }
            });
}
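The trailing filter matters because transform maps elements one-for-one and cannot drop the nulls that readUnit produces for units in other formats. As a minor simplification (not what the source does), Guava's stock Predicates.notNull() could replace the anonymous Predicate:

// Equivalent to the anonymous Predicate above; `transformed` stands in for the
// Iterators.transform(...) expression from the method.
return Iterators.filter(transformed, Predicates.<T>notNull());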
From source file:org.fcrepo.kernel.api.utils.iterators.RdfStream.java
/**
 * Apply a Function to an Iterator.
 *
 * @param f the function to apply to each triple
 * @param <ToType> the element type of the resulting Iterator
 * @return an Iterator over the transformed elements
 */
public <ToType> Iterator<ToType> transform(final Function<? super Triple, ToType> f) {
    return Iterators.transform(this, f::apply);
}
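The f::apply method reference bridges java.util.function.Function to the com.google.common.base.Function parameter that Iterators.transform declares. Written out, it is roughly equivalent to this anonymous adapter:

// Roughly what f::apply desugars to: a Guava Function delegating to f.
com.google.common.base.Function<Triple, ToType> adapter =
        new com.google.common.base.Function<Triple, ToType>() {
            @Override
            public ToType apply(Triple input) {
                return f.apply(input);
            }
        };
return Iterators.transform(this, adapter);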
From source file:org.calrissian.accumulorecipes.thirdparty.pig.loader.EntityLoader.java
@Override
public void prepareToRead(RecordReader recordReader, PigSplit pigSplit) throws IOException {
    RecordReaderValueIterator<Key, EntityWritable> rri =
            new RecordReaderValueIterator<Key, EntityWritable>(recordReader);
    Iterator<Entity> xformed = Iterators.transform(rri, new GettableTransform<Entity>());
    itr = new AttributeStoreIterator<Entity>(xformed);
}
From source file:com.cloudera.oryx.lambda.speed.SpeedLayer.java
public synchronized void start() {
    log.info("Starting SparkContext for master {}, interval {} seconds", streamingMaster,
            generationIntervalSec);
    long blockIntervalMS = TimeUnit.MILLISECONDS.convert(blockIntervalSec, TimeUnit.SECONDS);
    SparkConf sparkConf = new SparkConf();
    sparkConf.setIfMissing("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
    String blockIntervalString = Long.toString(blockIntervalMS);
    sparkConf.setIfMissing("spark.streaming.blockInterval", blockIntervalString);
    // Turn this down to prevent long blocking at shutdown
    sparkConf.setIfMissing("spark.streaming.gracefulStopTimeout", blockIntervalString);
    sparkConf.setIfMissing("spark.cleaner.ttl", Integer.toString(20 * generationIntervalSec));
    sparkConf.setIfMissing("spark.logConf", "true");
    sparkConf.setIfMissing("spark.ui.port", Integer.toString(uiPort));
    sparkConf.setMaster(streamingMaster);
    sparkConf.setAppName("OryxSpeedLayer");
    long batchDurationMS = TimeUnit.MILLISECONDS.convert(generationIntervalSec, TimeUnit.SECONDS);
    JavaSparkContext sparkContext = new JavaSparkContext(sparkConf);
    streamingContext = new JavaStreamingContext(sparkContext, new Duration(batchDurationMS));
    log.info("Creating message stream from topic");
    JavaPairDStream<K, M> dStream = buildDStream();
    Properties consumerProps = new Properties();
    consumerProps.setProperty("group.id", "OryxGroup-SpeedLayer-" + System.currentTimeMillis());
    consumerProps.setProperty("zookeeper.connect", updateTopicLockMaster);
    // Do start from the beginning of the update queue
    consumerProps.setProperty("auto.offset.reset", "smallest");
    ConsumerConfig consumerConfig = new ConsumerConfig(consumerProps);
    consumer = Consumer.createJavaConsumerConnector(consumerConfig);
    KafkaStream<String, U> stream = consumer.createMessageStreams(Collections.singletonMap(updateTopic, 1),
            new StringDecoder(null), loadDecoderInstance()).get(updateTopic).get(0);
    final Iterator<KeyMessage<String, U>> transformed = Iterators.transform(stream.iterator(),
            new Function<MessageAndMetadata<String, U>, KeyMessage<String, U>>() {
                @Override
                public KeyMessage<String, U> apply(MessageAndMetadata<String, U> input) {
                    return new KeyMessage<>(input.key(), input.message());
                }
            });
    modelManager = loadManagerInstance();
    new Thread(new LoggingRunnable() {
        @Override
        public void doRun() throws IOException {
            modelManager.consume(transformed);
        }
    }).start();
    dStream.foreachRDD(new SpeedLayerUpdate<>(modelManager, updateBroker, updateTopic));
    streamingContext.start();
}
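Note that stream.iterator() here is effectively unbounded and blocks while waiting for messages; transform copes because it never reads ahead: each next() pulls exactly one element from the source, on the consumer thread. A toy sketch of transform over an endless iterator:

// transform does no buffering or lookahead, so unbounded sources are fine.
Iterator<Integer> endless = Iterators.cycle(1, 2, 3);
Iterator<String> labeled = Iterators.transform(endless, i -> "msg-" + i);
System.out.println(labeled.next()); // msg-1, pulled on demand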
From source file:org.commoncrawl.mapred.pipelineV3.domainmeta.DomainMetadataTask.java
@Override
protected void parseArgs() throws IOException {
    CommandLineParser parser = new GnuParser();
    try {
        // parse the command line arguments
        CommandLine line = parser.parse(options, _args);
        // default to single partition - partition zero
        ImmutableList<Integer> partitions = ImmutableList.of(0);
        if (line.hasOption("partitions")) {
            partitions = ImmutableList.copyOf(Iterators.transform(
                    Iterators.forArray(line.getOptionValue("partitions").split(",")),
                    new Function<String, Integer>() {
                        @Override
                        public Integer apply(String arg0) {
                            return Integer.parseInt(arg0);
                        }
                    }));
        }
        if (partitions.size() == 0) {
            throw new IOException("One Partition Required At a Minimum!");
        }
        _partitionList = partitions;
        if (line.hasOption("rebuild")) {
            LOG.info("Rebuild Option Specified. Deleting Outputs");
            for (CrawlPipelineStep step : getSteps()) {
                LOG.info("Deleting Output Dir:" + step.getOutputDir() + " for Step:" + step.getName());
                getFileSystem().delete(step.getOutputDir(), true);
            }
        }
    } catch (Exception e) {
        LOG.error(StringUtils.stringifyException(e));
        throw new IOException(e);
    }
}
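A note on alternatives: Guava versions from 16.0 on also ship Ints.stringConverter(), a Converter<String, Integer> that is itself a Function, so the anonymous parser above could be written more compactly:

// Compact alternative to the anonymous Function above (Guava 16.0+).
ImmutableList<Integer> partitions = ImmutableList.copyOf(Iterators.transform(
        Iterators.forArray(line.getOptionValue("partitions").split(",")),
        Ints.stringConverter()));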