Example usage for java.util.stream.Stream.empty()

Introduction

This page collects example usages of java.util.stream.Stream.empty(), drawn from open-source projects.

Prototype

public static <T> Stream<T> empty() 

Documentation

Returns an empty sequential Stream.
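
Before the project examples below, here is a minimal, self-contained sketch (class and method names are hypothetical) of the two patterns that recur throughout this page: returning Stream.empty() instead of null so callers can keep chaining, and using it as the fallback branch inside a flatMap.

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class StreamEmptyDemo {

    // Returning Stream.empty() instead of null lets callers chain stream
    // operations without a null check.
    static Stream<String> namesMatching(List<String> names, String prefix) {
        if (names == null) {
            return Stream.empty();
        }
        return names.stream().filter(n -> n.startsWith(prefix));
    }

    public static void main(String[] args) {
        // flatMap absorbs the empty streams, keeping only the real matches.
        List<String> result = Stream.of(List.of("alpha", "beta"), null)
                .flatMap(list -> namesMatching(list, "a"))
                .collect(Collectors.toList());
        System.out.println(result); // prints [alpha]
    }
}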

Usage

From source file:org.codice.ddf.configuration.migration.ImportMigrationManagerImplTest.java

@Test
public void testConstructorWithNullReport() throws Exception {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage(Matchers.containsString("null report"));

    new ImportMigrationManagerImpl(null, exportFile, Stream.empty(), zip);
}

From source file:org.talend.dataprep.dataset.service.DataSetService.java

@RequestMapping(value = "/datasets", method = RequestMethod.GET, produces = APPLICATION_JSON_VALUE)
@ApiOperation(value = "List all data sets and filters on certified, or favorite or a limited number when asked", notes = "Returns the list of data sets (and filters) the current user is allowed to see. Creation date is an Epoch time value (in UTC time zone).")
@Timed
public Callable<Stream<UserDataSetMetadata>> list(
        @ApiParam(value = "Sort key (by name, creation or modification date)") @RequestParam(defaultValue = "creationDate") Sort sort,
        @ApiParam(value = "Order for sort key (desc or asc or modif)") @RequestParam(defaultValue = "desc") Order order,
        @ApiParam(value = "Filter on name containing the specified name") @RequestParam(defaultValue = "") String name,
        @ApiParam(value = "Filter on certified data sets") @RequestParam(defaultValue = "false") boolean certified,
        @ApiParam(value = "Filter on favorite data sets") @RequestParam(defaultValue = "false") boolean favorite,
        @ApiParam(value = "Only return a limited number of data sets") @RequestParam(defaultValue = "false") boolean limit) {
    return () -> {
        // Build filter for data sets
        String userId = security.getUserId();
        final UserData userData = userDataRepository.get(userId);
        final List<String> predicates = new ArrayList<>();
        predicates.add("lifecycle.importing = false");
        if (favorite) {
            if (userData != null && !userData.getFavoritesDatasets().isEmpty()) {
                predicates.add("id in [" + userData.getFavoritesDatasets().stream().map(ds -> '\'' + ds + '\'')
                        .collect(Collectors.joining(",")) + "]");
            } else {
                // Favorites were requested but the user has none
                return Stream.empty();
            }
        }
        if (certified) {
            predicates.add("governance.certificationStep = '" + Certification.CERTIFIED + "'");
        }
        if (!StringUtils.isEmpty(name)) {
            predicates.add("name contains '" + name + "'");
        }
        final String tqlFilter = predicates.stream().collect(Collectors.joining(" and "));
        LOG.debug("TQL Filter in use: {}", tqlFilter);

        // Get all data sets according to filter
        try (Stream<DataSetMetadata> stream = dataSetMetadataRepository.list(tqlFilter, sort, order)) {
            Stream<UserDataSetMetadata> userDataSetMetadataStream = stream
                    .map(m -> conversionService.convert(m, UserDataSetMetadata.class));
            if (sort == Sort.AUTHOR || sort == Sort.NAME) { // As these are not handled well by the mongo repository
                userDataSetMetadataStream = userDataSetMetadataStream
                        .sorted(getDataSetMetadataComparator(sort, order));
            }
            return userDataSetMetadataStream.limit(limit ? datasetListLimit : Long.MAX_VALUE);
        }
    };
}

From source file:com.hurence.logisland.connect.source.KafkaConnectStreamSource.java

@Override
public Dataset<Row> getBatch(Option<Offset> start, Offset end) {
    Long startOff = start.isDefined() ? Long.parseLong(start.get().json())
            : !bufferedRecords.isEmpty() ? bufferedRecords.firstKey() : 0L;

    Map<Integer, List<InternalRow>> current = new LinkedHashMap<>(
            bufferedRecords.subMap(startOff, Long.parseLong(end.json()) + 1)).keySet().stream()
                    .flatMap(offset -> {
                        List<Tuple2<SourceTask, SourceRecord>> srl = bufferedRecords.remove(offset);
                        if (srl != null) {
                            uncommittedRecords.put(offset, srl);
                            return srl.stream();
                        }
                        return Stream.empty();
                    }).map(Tuple2::_2).map(
                            sourceRecord -> InternalRow.fromSeq(JavaConversions
                                    .<Object>asScalaBuffer(Arrays.asList(toUTFString(sourceRecord.topic()),
                                            toUTFString(sourceRecord.sourcePartition()),
                                            toUTFString(sourceRecord.sourceOffset()),
                                            keyConverter.fromConnectData(sourceRecord.topic(),
                                                    sourceRecord.keySchema(), sourceRecord.key()),
                                            valueConverter.fromConnectData(sourceRecord.topic(),
                                                    sourceRecord.valueSchema(), sourceRecord.value())))
                                    .toSeq()))
                    .collect(Collectors.groupingBy(row -> Objects.hashCode((row.getString(1)))));
    return sparkPlatform.createStreamingDataFrame(sqlContext, new SimpleRDD(sqlContext.sparkContext(), current),
            DATA_SCHEMA);
}

From source file:org.codice.ddf.configuration.migration.ImportMigrationManagerImplTest.java

@Test
public void testConstructorWithInvalidReport() throws Exception {
    final MigrationReportImpl report = new MigrationReportImpl(MigrationOperation.EXPORT, Optional.empty());

    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage(Matchers.containsString("invalid migration operation"));

    new ImportMigrationManagerImpl(report, exportFile, Stream.empty(), zip);
}

From source file:org.codice.ddf.configuration.migration.ImportMigrationManagerImplTest.java

@Test
public void testConstructorWithNullExportFile() throws Exception {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage(Matchers.containsString("null export file"));

    new ImportMigrationManagerImpl(report, null, Stream.empty());
}

From source file:com.ethercamp.harmony.service.WalletService.java

public WalletInfoDTO getWalletInfo() {
    BigInteger gasPrice = BigInteger.valueOf(ethereum.getGasPrice());
    BigInteger txFee = gasLimit.multiply(gasPrice);

    List<WalletAddressDTO> list = addresses.entrySet().stream().flatMap(e -> {
        final String hexAddress = e.getKey();
        try {
            final byte[] address = Hex.decode(hexAddress);
            final BigInteger balance = repository.getBalance(address);
            final BigInteger sendBalance = calculatePendingChange(pendingSendTransactions, hexAddress, txFee);
            final BigInteger receiveBalance = calculatePendingChange(pendingReceiveTransactions, hexAddress,
                    BigInteger.ZERO);

            return Stream.of(new WalletAddressDTO(e.getValue(), e.getKey(), balance,
                    receiveBalance.subtract(sendBalance), keystore.hasStoredKey(e.getKey())));
        } catch (Exception exception) {
            log.error("Error in making wallet address " + hexAddress, exception);
            return Stream.empty();
        }
    }).collect(Collectors.toList());

    BigInteger totalAmount = list.stream().map(t -> t.getAmount()).reduce(BigInteger.ZERO,
            (state, amount) -> state.add(amount));

    WalletInfoDTO result = new WalletInfoDTO(totalAmount);

    result.getAddresses().addAll(list);
    return result;
}

From source file:alfio.manager.WaitingQueueManager.java

private Stream<Triple<WaitingQueueSubscription, TicketReservationWithOptionalCodeModification, ZonedDateTime>> handlePreReservation(
        Event event, int waitingPeople) {
    List<TicketCategory> ticketCategories = ticketCategoryRepository.findAllTicketCategories(event.getId());
    // Since this job runs more than once per minute, to ensure that all waiting-queue subscribers get a seat *before*
    // everyone else, we must process them a little before the sale period starts.
    Optional<TicketCategory> categoryWithInceptionInFuture = ticketCategories.stream()
            .sorted(TicketCategory.COMPARATOR).findFirst().filter(t -> ZonedDateTime.now(event.getZoneId())
                    .isBefore(t.getInception(event.getZoneId()).minusMinutes(5)));
    int ticketsNeeded = Math.min(waitingPeople, eventRepository.countExistingTickets(event.getId()));
    if (ticketsNeeded > 0) {
        preReserveIfNeeded(event, ticketsNeeded);
        if (!categoryWithInceptionInFuture.isPresent()) {
            return distributeAvailableSeats(event, Ticket.TicketStatus.PRE_RESERVED, () -> ticketsNeeded);
        }
    }
    return Stream.empty();
}

From source file:org.apache.tinkerpop.gremlin.neo4j.structure.Neo4jGraph.java

@Override
public Iterator<Vertex> vertices(final Object... vertexIds) {
    this.tx().readWrite();
    final Predicate<Neo4jNode> nodePredicate = this.trait.getNodePredicate();
    if (0 == vertexIds.length) {
        return IteratorUtils.stream(this.getBaseGraph().allNodes()).filter(nodePredicate)
                .map(node -> (Vertex) new Neo4jVertex(node, this)).iterator();
    } else {
        ElementHelper.validateMixedElementIds(Vertex.class, vertexIds);
        return Stream.of(vertexIds).map(id -> {
            if (id instanceof Number)
                return ((Number) id).longValue();
            else if (id instanceof String)
                return Long.valueOf(id.toString());
            else if (id instanceof Vertex) {
                return (Long) ((Vertex) id).id();
            } else
                throw new IllegalArgumentException("Unknown vertex id type: " + id);
        }).flatMap(id -> {
            try {
                return Stream.of(this.baseGraph.getNodeById(id));
            } catch (final RuntimeException e) {
                if (Neo4jHelper.isNotFound(e))
                    return Stream.empty();
                throw e;
            }
        }).filter(nodePredicate).map(node -> (Vertex) new Neo4jVertex(node, this)).iterator();
    }
}

From source file:com.intuit.wasabi.repository.cassandra.impl.CassandraPagesRepository.java

@Override
public List<PageExperiment> getExperiments(Application.Name applicationName, Page.Name pageName) {
    Stream<PageExperimentByAppNamePage> resultList = Stream.empty();
    try {
        Result<PageExperimentByAppNamePage> result = pageExperimentIndexAccessor
                .selectBy(applicationName.toString(), pageName.toString());
        resultList = StreamSupport
                .stream(Spliterators.spliteratorUnknownSize(result.iterator(), Spliterator.ORDERED), false);
    } catch (ReadTimeoutException | UnavailableException | NoHostAvailableException e) {
        throw new RepositoryException(
                new StringBuilder("Could not retrieve the experiments for applicationName:\"")
                        .append(applicationName).append("\", page:\"").append(pageName).append("\"").toString(),
                e);
    }
    //TODO: make the experiment label part of the pageExperimentIndex to save a query per page
    return resultList.map(t -> {
        Optional<com.intuit.wasabi.repository.cassandra.pojo.Experiment> experiment = Optional
                .ofNullable(experimentAccessor.selectBy(t.getExperimentId()).one());
        PageExperiment.Builder builder = new PageExperiment.Builder(Experiment.ID.valueOf(t.getExperimentId()),
                null, t.isAssign());
        if (experiment.isPresent()) {
            builder.setLabel(Experiment.Label.valueOf(experiment.get().getLabel()));
        }
        return builder.build();
    }).filter(t -> t.getLabel() != null).collect(Collectors.toList());
}

From source file:org.hawkular.metrics.clients.ptrans.fullstack.CollectdITest.java

private List<Point> getServerData() throws Exception {
    ObjectMapper objectMapper = new ObjectMapper();

    HttpURLConnection urlConnection = (HttpURLConnection) new URL(findNumericMetricsUrl).openConnection();
    urlConnection.connect();
    int responseCode = urlConnection.getResponseCode();
    if (responseCode != HttpURLConnection.HTTP_OK) {
        String msg = "Could not get metrics list from server: %s, %d";
        fail(String.format(Locale.ROOT, msg, findNumericMetricsUrl, responseCode));
    }
    List<String> metricNames;
    try (InputStream inputStream = urlConnection.getInputStream()) {
        TypeFactory typeFactory = objectMapper.getTypeFactory();
        CollectionType valueType = typeFactory.constructCollectionType(List.class, MetricName.class);
        List<MetricName> value = objectMapper.readValue(inputStream, valueType);
        metricNames = value.stream().map(MetricName::getId).collect(toList());
    }

    Stream<Point> points = Stream.empty();

    for (String metricName : metricNames) {
        String[] split = metricName.split("\\.");
        String type = split[split.length - 1];

        urlConnection = (HttpURLConnection) new URL(findNumericDataUrl(metricName)).openConnection();
        urlConnection.connect();
        responseCode = urlConnection.getResponseCode();
        if (responseCode != HttpURLConnection.HTTP_OK) {
            fail("Could not load metric data from server: " + responseCode);
        }

        try (InputStream inputStream = urlConnection.getInputStream()) {
            TypeFactory typeFactory = objectMapper.getTypeFactory();
            CollectionType valueType = typeFactory.constructCollectionType(List.class, MetricData.class);
            List<MetricData> data = objectMapper.readValue(inputStream, valueType);
            Stream<Point> metricPoints = data.stream()
                    .map(metricData -> new Point(type, metricData.timestamp, metricData.value));
            points = Stream.concat(points, metricPoints);
        }
    }

    return points.sorted(Comparator.comparing(Point::getType).thenComparing(Point::getTimestamp))
            .collect(toList());
}