Example usage for java.util.stream StreamSupport stream


Introduction

On this page you can find example usage of java.util.stream.StreamSupport.stream, collected from the open-source projects listed below.

Prototype

public static <T> Stream<T> stream(Spliterator<T> spliterator, boolean parallel) 

Document

Creates a new sequential or parallel Stream from a Spliterator.
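
The second argument selects the execution mode: false yields a sequential stream, true a parallel one. As a minimal, self-contained sketch of both forms (the class and variable names here are illustrative, not taken from the examples below):

import java.util.Arrays;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

public class StreamSupportExample {
    public static void main(String[] args) {
        Iterable<String> iterable = Arrays.asList("a", "b", "c");

        // parallel = false: a sequential Stream backed by the Iterable's Spliterator
        Stream<String> sequential = StreamSupport.stream(iterable.spliterator(), false);
        sequential.forEach(System.out::println); // prints a, b, c in order

        // parallel = true: a parallel Stream; worthwhile only if the Spliterator splits well
        long count = StreamSupport.stream(iterable.spliterator(), true).count();
        System.out.println(count); // prints 3
    }
}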

Usage

From source file:nu.yona.server.goals.service.ActivityCategoryService.java

private void assertNoDuplicateNames(Set<UUID> idsToSkip, Map<Locale, String> localizableName) {
    Iterable<ActivityCategory> allCategories = repository.findAll();
    List<ActivityCategory> categoriesToConsider = StreamSupport.stream(allCategories.spliterator(), false)
            .filter(c -> !idsToSkip.contains(c.getId())).collect(Collectors.toList());
    for (Entry<Locale, String> localeAndName : localizableName.entrySet()) {
        assertNoDuplicateNames(categoriesToConsider, localeAndName);
    }
}

From source file:io.mandrel.document.impl.ElasticsearchDocumentStore.java

@Override
public void byPages(int pageSize, Callback callback) {

    SearchResponse searchResponse = client.prepareSearch(index).setSize(pageSize).setFrom(0)
            .setScroll(new Scroll(TimeValue.timeValueMinutes(10))).get();
    boolean loop = true;
    try {
        while (loop) {
            loop = callback.on(StreamSupport.stream(searchResponse.getHits().spliterator(), true).map(mapper)
                    .collect(Collectors.toList()));
            searchResponse = client.searchScroll(Requests.searchScrollRequest(searchResponse.getScrollId()))
                    .actionGet();

            // stop when the scroll is exhausted (the hits array is never null, only empty)
            if (searchResponse.getHits().hits().length == 0) {
                break;
            }
        }
    } finally {
        client.prepareClearScroll().addScrollId(searchResponse.getScrollId()).execute();
    }
}

From source file:notaql.performance.PerformanceTest.java

private static String composeEngine(JSONObject engine) {
    final StringBuilder builder = new StringBuilder();
    builder.append(engine.get("engine"));
    builder.append("(");

    final String params = StreamSupport
            .stream(Spliterators.spliteratorUnknownSize(engine.keys(), Spliterator.ORDERED), false)
            .filter(k -> !k.equals("engine")).map(k -> k + " <- " + toArg(k, engine))
            .collect(Collectors.joining(", "));

    builder.append(params);
    builder.append(")");

    return builder.toString();
}

From source file:com.yoshio3.services.StorageService.java

public void deleteAll(String containerName) {
    try {
        CloudBlobContainer container = blobClient.getContainerReference(containerName);
        Iterable<ListBlobItem> items = container.listBlobs();
        Spliterator<ListBlobItem> spliterator = items.spliterator();
        Stream<ListBlobItem> stream = StreamSupport.stream(spliterator, false);

        stream.filter(item -> item instanceof CloudBlob).map(item -> (CloudBlob) item).forEach(blob -> {
            try {
                String name = blob.getName();

                CloudBlockBlob delFile;
                delFile = container.getBlockBlobReference(name);
                // Delete the blob.
                delFile.deleteIfExists();
            } catch (URISyntaxException | StorageException ex) {
                LOGGER.log(Level.SEVERE, null, ex);
            }
        });
    } catch (URISyntaxException | StorageException ex) {
        LOGGER.log(Level.SEVERE, null, ex);
    }
}

From source file:edu.cmu.cs.lti.discoursedb.io.prosolo.blog.converter.BlogConverter.java

@Override
public void run(String... args) throws Exception {
    if (args.length < 3) {
        logger.error(
                "USAGE: BlogConverterApplication <DiscourseName> <DataSetName> <blogDump> <userMapping (optional)> <dumpIsWrappedInJsonArray (optional, default=false)>");
        throw new RuntimeException("Incorrect number of launch parameters.");

    }
    final String discourseName = args[0];

    final String dataSetName = args[1];
    if (dataSourceService.dataSourceExists(dataSetName)) {
        logger.warn("Dataset " + dataSetName + " has already been imported into DiscourseDB. Terminating...");
        return;
    }

    final String forumDumpFileName = args[2];
    File blogDumpFile = new File(forumDumpFileName);
    if (!blogDumpFile.exists() || !blogDumpFile.isFile() || !blogDumpFile.canRead()) {
        logger.error("Forum dump file does not exist or is not readable.");
        throw new RuntimeException("Can't read file " + forumDumpFileName);
    }

    //parse the optional fourth and fifth parameters
    String userMappingFileName = null;
    String jsonarray = null;
    if (args.length == 4) {
        if (args[3].equalsIgnoreCase("true") || args[3].equalsIgnoreCase("false")) {
            jsonarray = args[3];
        } else {
            userMappingFileName = args[3];
        }
    } else if (args.length > 4) { //both optional parameters are present
        if (args[3].equalsIgnoreCase("true") || args[3].equalsIgnoreCase("false")) {
            jsonarray = args[3];
            userMappingFileName = args[4];
        } else {
            jsonarray = args[4];
            userMappingFileName = args[3];
        }
    }

    //read the blog author to edX user mapping, if available
    if (userMappingFileName != null) {
        logger.trace("Reading user mapping from " + userMappingFileName);
        File userMappingFile = new File(userMappingFileName);
        if (!userMappingFile.exists() || !userMappingFile.isFile() || !userMappingFile.canRead()) {
            logger.error("User mappiong file does not exist or is not readable.");
            throw new RuntimeException("Can't read file " + userMappingFileName);
        }
        List<String> lines = FileUtils.readLines(userMappingFile);
        lines.remove(0); //remove header
        for (String line : lines) {
            String[] blogToedx = line.split(MAPPING_SEPARATOR);
            //if the current line contained a valid mapping, add it to the map
            if (blogToedx.length == 2 && blogToedx[0] != null && !blogToedx[0].isEmpty() && blogToedx[1] != null
                    && !blogToedx[1].isEmpty()) {
                blogToedxMap.put(blogToedx[0], blogToedx[1]);
            }
        }
    }

    if (jsonarray != null && jsonarray.equalsIgnoreCase("true")) {
        logger.trace("Set reader to expect a json array rather than regular json input.");
        this.dumpWrappedInJsonArray = true;
    }

    /*
     * Map data to DiscourseDB
     */

    logger.info("Mapping blog posts and comments to DiscourseDB");
    try (InputStream in = new FileInputStream(blogDumpFile)) {
        if (dumpWrappedInJsonArray) {
            //if the json dump is wrapped in a top-level array
            @SuppressWarnings("unchecked")
            List<ProsoloBlogPost> posts = (List<ProsoloBlogPost>) new ObjectMapper()
                    .readValues(new JsonFactory().createParser(in), new TypeReference<List<ProsoloBlogPost>>() {
                    }).next();
            posts.stream().forEach(p -> converterService.mapPost(p, discourseName, dataSetName, blogToedxMap));
        } else {
            //if the json dump is NOT wrapped in a top-level array
            Iterator<ProsoloBlogPost> pit = new ObjectMapper().readValues(new JsonFactory().createParser(in),
                    ProsoloBlogPost.class);
            Iterable<ProsoloBlogPost> iterable = () -> pit;
            StreamSupport.stream(iterable.spliterator(), false)
                    .forEach(p -> converterService.mapPost(p, discourseName, dataSetName, blogToedxMap));
        }
    }

    logger.info("All done.");
}

From source file:com.uber.hoodie.utilities.sources.KafkaSource.java

public KafkaSource(PropertiesConfiguration config, JavaSparkContext sparkContext, SourceDataFormat dataFormat,
        SchemaProvider schemaProvider) {
    super(config, sparkContext, dataFormat, schemaProvider);

    kafkaParams = new HashMap<>();
    Stream<String> keys = StreamSupport
            .stream(Spliterators.spliteratorUnknownSize(config.getKeys(), Spliterator.NONNULL), false);
    keys.forEach(k -> kafkaParams.put(k, config.getString(k)));

    UtilHelpers.checkRequiredProperties(config, Arrays.asList(Config.KAFKA_TOPIC_NAME));
    topicName = config.getString(Config.KAFKA_TOPIC_NAME);
}

From source file:io.divolte.server.filesinks.hdfs.FileFlusherLocalHdfsTest.java

private void verifyAvroFile(final List<Record> expected, final Schema schema, final Path avroFile) {
    final List<Record> result = StreamSupport
            .stream(readAvroFile(schema, avroFile.toFile()).spliterator(), false).collect(Collectors.toList());
    assertEquals(expected, result);
}

From source file:com.baasbox.commands.LinksResource.java

@Override
protected JsonNode list(JsonNode command) throws CommandException {
    validateHasParams(command);
    QueryParams params = QueryParams.getParamsFromJson(command.get(ScriptCommand.PARAMS).get(QUERY));

    List<ODocument> listOfLinks;
    try {
        listOfLinks = LinkService.getLink(params);
        String s = JSONFormats.prepareDocToJson(listOfLinks, JSONFormats.Formats.LINK);
        ArrayNode lst = (ArrayNode) Json.mapper().readTree(s);
        lst.forEach((j) -> ((ObjectNode) j).remove(TO_REMOVE).remove("@rid"));
        StreamSupport.stream(lst.spliterator(), false).flatMap(x -> Stream.of(x.get("in"), x.get("out")))
                .filter(x -> x instanceof ObjectNode)
                .forEach(x -> ((ObjectNode) x).remove(TO_REMOVE).remove("@rid"));

        return lst;
    } catch (SqlInjectionException | IOException e) {
        throw new CommandExecutionException(command, "error executing command: " + ExceptionUtils.getMessage(e),
                e);
    }
}

From source file:io.syndesis.dao.DeploymentDescriptorTest.java

@Test
public void thereShouldBeNoDuplicateNames() {
    final Map<String, Long> namesWithCount = StreamSupport.stream(deployment.spliterator(), true)
            .filter(data -> "connector".equals(data.get("kind").asText()))
            .flatMap(
                    connector -> StreamSupport.stream(connector.get("data").get("actions").spliterator(), true))
            .map(action -> action.get("name").asText())
            .collect(Collectors.groupingBy(Function.identity(), Collectors.counting()));

    final Map<String, Long> multipleNames = namesWithCount.entrySet().stream().filter(e -> e.getValue() > 1)
            .collect(Collectors.toMap(Entry::getKey, Entry::getValue));

    assertThat(multipleNames).as("Expected unique action names").isEmpty();
}

From source file:fi.vrk.xroad.catalog.persistence.CatalogServiceTest.java

private void assertMemberAndSubsystemCounts(int members, int activeMembers, int subsystems,
        int activeSubsystems) {
    assertEquals(members, Iterables.size(catalogService.getAllMembers()));
    assertEquals(activeMembers, Iterables.size(catalogService.getActiveMembers()));
    assertEquals(subsystems, Iterables.size(subsystemRepository.findAll()));
    assertEquals(activeSubsystems, StreamSupport.stream(subsystemRepository.findAll().spliterator(), false)
            .filter(s -> !s.getStatusInfo().isRemoved()).count());
}