List of usage examples for com.google.common.collect.Iterators.getOnlyElement
public static <T> T getOnlyElement(Iterator<T> iterator)
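getOnlyElement returns the single element of the given iterator and fails fast when that assumption is violated: it throws NoSuchElementException if the iterator is empty and IllegalArgumentException if it holds more than one element. A two-argument overload, getOnlyElement(iterator, defaultValue), returns the default instead of throwing on an empty iterator but still rejects multiple elements. A minimal sketch, with illustrative names that do not come from any of the projects below:

import com.google.common.collect.Iterators;

import java.util.Collections;
import java.util.List;

static void onlyElementExamples() {
    List<String> one = Collections.singletonList("a");
    String only = Iterators.getOnlyElement(one.iterator());              // "a"

    // Two-argument overload: the default is returned when the iterator is empty.
    String fallback = Iterators.getOnlyElement(
            Collections.<String>emptyList().iterator(), "default");      // "default"

    // Failure modes of the one-argument form:
    //   empty iterator        -> NoSuchElementException
    //   more than one element -> IllegalArgumentException
}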
From source file:ch.ethz.inf.vs.hypermedia.corehal.LinkCollection.java
public Link getLink(String s, String name) {
    Iterable<Link> links = getLinks(s, name);
    if (links == null)
        return null;
    return Iterators.getOnlyElement(links.iterator());
}
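Since getLinks already returns an Iterable, the equivalent Iterables.getOnlyElement(links) would express the same check without the explicit iterator() call; which class you reach for is just a matter of whether you hold an Iterable or an Iterator:

// equivalent call on the Iterable itself, with the same failure semantics
return Iterables.getOnlyElement(links);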
From source file:org.apache.drill.exec.physical.impl.broadcastsender.BroadcastSenderCreator.java
@Override
public RootExec getRoot(FragmentContext context, BroadcastSender config, List<RecordBatch> children)
        throws ExecutionSetupException {
    assert children != null && children.size() == 1;
    return new BroadcastSenderRootExec(context, Iterators.getOnlyElement(children.iterator()), config);
}
From source file:io.druid.data.input.impl.SqlFirehose.java
@Nullable
@Override
public InputRow nextRow() {
    Map<String, Object> mapToParse = lineIterator.next();
    return (InputRow) Iterators.getOnlyElement(parser.parseBatch(mapToParse).iterator());
}
From source file:com.cloudera.validation.BeanConstraintValidatorFactory.java
private String getBeanName(Class<?> key) {
    String[] beanNames = beanFactory.getBeanNamesForType(key);
    return Iterators.getOnlyElement(Iterators.forArray(beanNames));
}
From source file:org.apache.druid.indexer.hadoop.DatasourceRecordReader.java
@Override
public void initialize(InputSplit split, final TaskAttemptContext context) throws IOException {
    List<WindowedDataSegment> segments = ((DatasourceInputSplit) split).getSegments();
    String dataSource = Iterators
            .getOnlyElement(segments.stream().map(s -> s.getSegment().getDataSource()).distinct().iterator());

    spec = DatasourceInputFormat.getIngestionSpec(context.getConfiguration(), dataSource);
    logger.info("load schema [%s]", spec);

    List<WindowedStorageAdapter> adapters = Lists.transform(segments,
            new Function<WindowedDataSegment, WindowedStorageAdapter>() {
                @Override
                public WindowedStorageAdapter apply(WindowedDataSegment segment) {
                    try {
                        logger.info("Getting storage path for segment [%s]",
                                segment.getSegment().getIdentifier());
                        Path path = new Path(JobHelper.getURIFromSegment(segment.getSegment()));

                        logger.info("Fetch segment files from [%s]", path);

                        File dir = Files.createTempDir();
                        tmpSegmentDirs.add(dir);
                        logger.info("Locally storing fetched segment at [%s]", dir);

                        JobHelper.unzipNoGuava(path, context.getConfiguration(), dir, context, null);
                        logger.info("finished fetching segment files");

                        QueryableIndex index = HadoopDruidIndexerConfig.INDEX_IO.loadIndex(dir);
                        indexes.add(index);
                        numRows += index.getNumRows();

                        return new WindowedStorageAdapter(new QueryableIndexStorageAdapter(index),
                                segment.getInterval());
                    } catch (IOException ex) {
                        throw Throwables.propagate(ex);
                    }
                }
            });

    firehose = new IngestSegmentFirehose(adapters, spec.getTransformSpec(), spec.getDimensions(),
            spec.getMetrics(), spec.getFilter());
}
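The stream().map(...).distinct().iterator() plus getOnlyElement combination above doubles as a consistency check: it yields the single data source name shared by all segments and throws IllegalArgumentException if the split mixes segments from different data sources. The same "assert all values are equal" idiom in isolation, with illustrative data:

import com.google.common.collect.Iterators;

import java.util.List;

List<String> segmentDataSources = List.of("wikipedia", "wikipedia", "wikipedia");
// distinct() collapses duplicates, so getOnlyElement succeeds only when every
// entry names the same data source and throws IllegalArgumentException otherwise.
String dataSource = Iterators.getOnlyElement(segmentDataSources.stream().distinct().iterator());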
From source file:org.sosy_lab.cpachecker.cpa.smg.join.SMGJoinMatchObjects.java
final private static boolean checkConsistentFields(SMGObject pObj1, SMGObject pObj2, SMGNodeMapping pMapping1,
        SMGNodeMapping pMapping2, SMG pSMG1, SMG pSMG2) {

    List<SMGEdgeHasValue> fields = new ArrayList<>();

    fields.addAll(pSMG1.getHVEdges(SMGEdgeHasValueFilter.objectFilter(pObj1)));
    fields.addAll(pSMG2.getHVEdges(SMGEdgeHasValueFilter.objectFilter(pObj2)));

    // TODO: We go through some fields twice, fix
    for (SMGEdgeHasValue hv : fields) {
        Set<SMGEdgeHasValue> hv1 = pSMG1.getHVEdges(SMGEdgeHasValueFilter.objectFilter(pObj1)
                .filterByType(hv.getType()).filterAtOffset(hv.getOffset()));
        Set<SMGEdgeHasValue> hv2 = pSMG2.getHVEdges(SMGEdgeHasValueFilter.objectFilter(pObj2)
                .filterByType(hv.getType()).filterAtOffset(hv.getOffset()));

        if (hv1.size() > 0 && hv2.size() > 0) {
            Integer v1 = Iterators.getOnlyElement(hv1.iterator()).getValue();
            Integer v2 = Iterators.getOnlyElement(hv2.iterator()).getValue();

            if (pMapping1.containsKey(v1) && pMapping2.containsKey(v2)
                    && !(pMapping1.get(v1).equals(pMapping2.get(v2)))) {
                return true;
            }
        }
    }

    return false;
}
From source file:org.sosy_lab.cpachecker.cpa.smgfork.join.SMGJoinMatchObjects.java
final private static boolean checkConsistentFields(SMGObject pObj1, SMGObject pObj2, SMGNodeMapping pMapping1,
        SMGNodeMapping pMapping2, ReadableSMG pSMG1, ReadableSMG pSMG2) {

    List<SMGEdgeHasValue> fields = new ArrayList<>();

    fields.addAll(pSMG1.getHVEdges(SMGEdgeHasValueFilter.objectFilter(pObj1)));
    fields.addAll(pSMG2.getHVEdges(SMGEdgeHasValueFilter.objectFilter(pObj2)));

    // TODO: We go through some fields twice, fix
    for (SMGEdgeHasValue hv : fields) {
        Set<SMGEdgeHasValue> hv1 = pSMG1.getHVEdges(SMGEdgeHasValueFilter.objectFilter(pObj1)
                .filterByType(hv.getType()).filterAtOffset(hv.getOffset()));
        Set<SMGEdgeHasValue> hv2 = pSMG2.getHVEdges(SMGEdgeHasValueFilter.objectFilter(pObj2)
                .filterByType(hv.getType()).filterAtOffset(hv.getOffset()));

        if (hv1.size() > 0 && hv2.size() > 0) {
            Integer v1 = Iterators.getOnlyElement(hv1.iterator()).getValue();
            Integer v2 = Iterators.getOnlyElement(hv2.iterator()).getValue();

            if (pMapping1.containsKey(v1) && pMapping2.containsKey(v2)
                    && !(pMapping1.get(v1).equals(pMapping2.get(v2)))) {
                return true;
            }
        }
    }

    return false;
}
From source file:org.apache.beam.runners.samza.translation.ParDoBoundMultiTranslator.java
ParDoBoundMultiTranslator() {
    final Iterator<SamzaDoFnInvokerRegistrar> invokerReg = ServiceLoader.load(SamzaDoFnInvokerRegistrar.class)
            .iterator();
    doFnInvokerRegistrar = invokerReg.hasNext() ? Iterators.getOnlyElement(invokerReg) : null;
}
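The hasNext() guard here permits zero registrars on the classpath while still rejecting more than one. The two-argument overload expresses the same zero-or-one contract in a single call, assuming a null default is acceptable:

// equivalent: null when no registrar is installed,
// IllegalArgumentException when several are found by the ServiceLoader
doFnInvokerRegistrar = Iterators.getOnlyElement(invokerReg, null);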
From source file:cz.afri.smg.join.SMGJoinMatchObjects.java
private static boolean checkConsistentFields(final SMGObject pObj1, final SMGObject pObj2,
        final SMGNodeMapping pMapping1, final SMGNodeMapping pMapping2, final ReadableSMG pSMG1,
        final ReadableSMG pSMG2) {

    List<SMGEdgeHasValue> fields = new ArrayList<>();

    Iterables.addAll(fields, pSMG1.getHVEdges(SMGEdgeHasValueFilter.objectFilter(pObj1)));
    Iterables.addAll(fields, pSMG2.getHVEdges(SMGEdgeHasValueFilter.objectFilter(pObj2)));

    // TODO: We go through some fields twice, fix
    for (SMGEdgeHasValue hv : fields) {
        SMGEdgeHasValueFilter filter1 = SMGEdgeHasValueFilter.objectFilter(pObj1);
        filter1.filterByType(hv.getType()).filterAtOffset(hv.getOffset());
        Iterable<SMGEdgeHasValue> hv1 = pSMG1.getHVEdges(filter1);

        SMGEdgeHasValueFilter filter2 = SMGEdgeHasValueFilter.objectFilter(pObj2);
        filter2.filterByType(hv.getType()).filterAtOffset(hv.getOffset());
        Iterable<SMGEdgeHasValue> hv2 = pSMG2.getHVEdges(filter2);

        if (hv1.iterator().hasNext() && hv2.iterator().hasNext()) {
            Integer v1 = Iterators.getOnlyElement(hv1.iterator()).getValue();
            Integer v2 = Iterators.getOnlyElement(hv2.iterator()).getValue();

            if (pMapping1.containsKey(v1) && pMapping2.containsKey(v2)
                    && !(pMapping1.get(v1).equals(pMapping2.get(v2)))) {
                return true;
            }
        }
    }

    return false;
}
From source file:org.jclouds.rackspace.clouddns.v1.functions.ParseJob.java
protected Object parseResponse(String requestUrl, JsonBall response) {
    if (response == null) {
        return null;
    } else if (requestUrl.contains("import")) {
        Type type = new TypeLiteral<Map<String, Set<ParseDomain.RawDomain>>>() {
        }.getType();

        Map<String, Set<RawDomain>> domainMap = json.fromJson(response.toString(), type);
        Domain domain = Iterators.getOnlyElement(domainMap.get("domains").iterator()).getDomain();

        return domain;
    } else if (requestUrl.contains("export")) {
        Type type = new TypeLiteral<Map<String, String>>() {
        }.getType();

        Map<String, String> exportMap = json.fromJson(response.toString(), type);
        String contents = exportMap.get("contents");
        List<String> contentsAsList = Lists.newArrayList(Splitter.on("\n").omitEmptyStrings().split(contents));

        return contentsAsList;
    } else if (response.toString().contains("domains")) {
        Type type = new TypeLiteral<Map<String, Set<RawDomain>>>() {
        }.getType();

        Map<String, Set<RawDomain>> domainMap = json.fromJson(response.toString(), type);
        Set<Domain> domains = FluentIterable.from(domainMap.get("domains")).transform(toDomain).toSet();

        return domains;
    } else if (response.toString().contains("records")) {
        Type type = new TypeLiteral<Map<String, Set<RawRecord>>>() {
        }.getType();

        Map<String, Set<RawRecord>> recordMap = json.fromJson(response.toString(), type);
        Set<RecordDetail> records = FluentIterable.from(recordMap.get("records")).transform(toRecordDetails)
                .toSet();

        if (isCreateSingleRecord) {
            return Iterables.getOnlyElement(records);
        } else {
            return records;
        }
    } else {
        throw new IllegalStateException(
                "Job parsing problem. Did not recognize any type in job response.\n" + response.toString());
    }
}