Example usage for org.springframework.data.domain Page getContent

List of usage examples for org.springframework.data.domain Page getContent

Introduction

On this page you can find example usage for org.springframework.data.domain Page getContent.

Prototype

List<T> getContent();

Document

Returns the page content as a List.
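
The sketch below is a minimal, hypothetical call site (UserRepository, User and the repository variable are stand-in names, not taken from the examples on this page; the usual org.springframework.data.domain imports are assumed). A query that accepts a Pageable returns a Page, and getContent() hands back just the elements of that page as a plain List, without the paging metadata.

    Pageable pageable = new PageRequest(0, 10);          // first page, 10 elements per page
    Page<User> page = userRepository.findAll(pageable);  // any repository method returning Page<T>
    List<User> usersOnPage = page.getContent();          // only the elements of this page
    for (User user : usersOnPage) {
        System.out.println(user.getName());
    }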

Usage

From source file:com.company.project.data.elasticsearch.service.UserESServiceTest.java

public void testRepositoryQueries() {
    System.out.println("testRepository");

    //elasticsearchTemplate.deleteIndex("xuseridx");
    elasticsearchTemplate.deleteIndex(User.class);
    elasticsearchTemplate.createIndex(User.class);
    //elasticsearchTemplate.createIndex(User.class, user);
    //elasticsearchTemplate.createIndex("xuseridx");

    elasticsearchTemplate.putMapping(User.class);
    elasticsearchTemplate.refresh(User.class, true);

    String id = UUID.randomUUID().toString();
    User user = new User();
    user.setId(id);
    user.setName("user-" + id);
    user.setRole(1L);
    user.setPath("1.0");
    Map<Integer, Collection<String>> userFilter = new HashMap<>();
    userFilter.put(1, Arrays.asList("filter1", "filter2"));
    userFilter.put(2, Arrays.asList("filter11", "filter12"));
    user.setFilter(userFilter);
    userRepository.save(user);

    boolean exists = userRepository.exists(user.getId());
    assertTrue(exists);

    List<User> users = new ArrayList<>();
    id = UUID.randomUUID().toString();
    user = new User();
    user.setId(id);
    user.setName("user-" + id);
    user.setRole(2L);
    user.setPath("1.1.0");
    userFilter = new HashMap<>();
    userFilter.put(1, Arrays.asList("filter1", "filter3"));
    userFilter.put(2, Arrays.asList("filter11", "filter13"));
    user.setFilter(userFilter);
    users.add(user);
    //userRepository.save(user);
    id = UUID.randomUUID().toString();
    user = new User();
    user.setId(id);
    user.setName("user-" + id);
    user.setRole(3L);
    user.setPath("1.1.1.0");
    userFilter = new HashMap<>();
    userFilter.put(1, Arrays.asList("filter1", "filter4"));
    userFilter.put(2, Arrays.asList("filter11", "filter14"));
    user.setFilter(userFilter);
    users.add(user);
    //userRepository.save(user);
    userRepository.save(users); //bulk save
    exists = userRepository.exists(user.getId());
    assertTrue(exists);

    elasticsearchTemplate.refresh(User.class, true);

    GetQuery getQuery = new GetQuery();
    getQuery.setId(id);
    User userIndexed = elasticsearchTemplate.queryForObject(getQuery, User.class);
    assertEquals("user-" + id, "" + userIndexed.getName());
    assertEquals(id, "" + userIndexed.getId());
    assertEquals("3", "" + userIndexed.getRole());

    String facetName = "testName";
    SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(matchAllQuery())
            .withFacet(new TermFacetRequestBuilder(facetName).fields("role").descCount().build()).build();

    // when
    FacetedPage<User> result = elasticsearchTemplate.queryForPage(searchQuery, User.class);
    assertEquals(3, result.getNumberOfElements());
    TermResult facet = (TermResult) result.getFacet(facetName);
    assertEquals(3, facet.getTerms().size());
    for (Term term : facet.getTerms()) {
        assertTrue("1".equals(term.getTerm()) || "2".equals(term.getTerm()) || "3".equals(term.getTerm()));
        System.out.println("Facet term : " + term.getTerm());
        //System.out.println("Facet count : " + term.getCount());
        //result
        //Facet term : 3
        //Facet term : 2
        //Facet term : 1
    }

    // query list all
    searchQuery = new NativeSearchQueryBuilder().withQuery(matchAllQuery())
            //.withFacet(new TermFacetRequestBuilder(facetName).fields("role").descCount().build())
            .build();
    List<User> userList = elasticsearchTemplate.queryForList(searchQuery, User.class);
    assertEquals(3, userList.size());
    for (User u : userList) {
        System.out.println("User ID: " + u.getId());
        System.out.println("User Name: " + u.getName());
    }

    // query list all with pagination
    searchQuery = new NativeSearchQueryBuilder().withQuery(matchAllQuery()).withPageable(new PageRequest(0, 10))
            //.withIndices("xuseridx")
            //.withTypes("xusertype")
            //.withFacet(new TermFacetRequestBuilder(facetName).fields("role").descCount().build())
            .build();
    Page<User> pagedUsers = elasticsearchTemplate.queryForPage(searchQuery, User.class);
    assertEquals(3, pagedUsers.getTotalElements());
    assertEquals(1, pagedUsers.getTotalPages());
    for (User u : pagedUsers.getContent()) {
        System.out.println("User ID: " + u.getId());
        System.out.println("User Name: " + u.getName());
    }

    // query filter with key "1" contains value "filter3" and key "2" contains value "filter13"
    QueryBuilder builder = nestedQuery("filter",
            boolQuery().must(termQuery("1", "filter3")).must(termQuery("2", "filter13")));
    searchQuery = new NativeSearchQueryBuilder().withQuery(builder).build();
    userList = elasticsearchTemplate.queryForList(searchQuery, User.class);
    assertEquals(1, userList.size());
    for (User u : userList) {
        System.out.println("User ID: " + u.getId());
        System.out.println("User Name: " + u.getName());
    }

    // query all with filter path prefix "1.1"
    builder = boolQuery().must(prefixQuery("path", "1.1"));
    searchQuery = new NativeSearchQueryBuilder().withQuery(builder).build();
    userList = elasticsearchTemplate.queryForList(searchQuery, User.class);
    assertEquals(2, userList.size());
    for (User u : userList) {
        System.out.println("User ID: " + u.getId());
        System.out.println("User Name: " + u.getName());
    }

    builder = QueryBuilders.multiMatchQuery("user", // term to search
            "path", "name" // field to search
    );
    searchQuery = new NativeSearchQueryBuilder().withQuery(builder).build();
    userList = elasticsearchTemplate.queryForList(searchQuery, User.class);
    assertEquals(3, userList.size());
    for (User u : userList) {
        System.out.println("User ID: " + u.getId());
        System.out.println("User Name: " + u.getName());
    }

    // ref: https://github.com/elasticsearch/elasticsearch/blob/master/docs/java-api/query-dsl-queries.asciidoc#boolean-query
    // http://www.elasticsearch.org/guide/en/elasticsearch/client/java-api/current/query-dsl-queries.html
    // Boolean Query
    builder = boolQuery().must(termQuery("content", "test1")) // field name, keyword
            .must(termQuery("content", "test4")) // field name, keyword
            .mustNot(termQuery("content", "test2")) // field name, keyword
            .should(termQuery("content", "test3")); // field name, keyword
    // Boosting Query
    builder = QueryBuilders.boostingQuery().positive(termQuery("name", "kimchy")) // query that will promote documents
            .negative(termQuery("name", "dadoonet")) // query that will demote documents
            .negativeBoost(0.2f); // negative boost
    //IDs Query
    builder = QueryBuilders.idsQuery().ids("1", "2");
    // Constant Score Query
    builder = QueryBuilders.constantScoreQuery(termFilter("name", "kimchy") // you can use a filter
    ).boost(2.0f); // filter score
    builder = QueryBuilders.constantScoreQuery(termQuery("name", "kimchy") // you can use a query
    ).boost(2.0f);
    // Prefix Query
    builder = QueryBuilders.prefixQuery("brand", // field
            "heine" // term
    );
    // QueryString Query
    builder = QueryBuilders.queryString("+kimchy -elasticsearch");
    // Range Query
    builder = QueryBuilders.rangeQuery("price") // field
            .from(5) // from
            .to(10) // to
            .includeLower(true) // include lower value means that from is gt when false or gte when true
            .includeUpper(false); // include upper value means that to is lt when false or lte when true

    builder = QueryBuilders.disMaxQuery() // add your queries
            .add(termQuery("name", "kimchy")) // add your queries
            .add(termQuery("name", "elasticsearch")) // 
            .boost(1.2f) // boost factor
            .tieBreaker(0.7f); // tie breaker
    // Fuzzy Like This (Field) Query (flt and flt_field)
    builder = QueryBuilders.fuzzyLikeThisQuery("name.first", "name.last") // fields
            .likeText("text like this one") // text
            .maxQueryTerms(12); // max num of Terms in generated queries
    //FuzzyQuery
    builder = QueryBuilders.fuzzyQuery("name", // field
            "kimzhy" // term
    );
}

From source file:com.luna.showcase.excel.service.ExcelDataService.java

/**
 * Exports data to an Excel 2007 (xlsx) workbook; a single xlsx sheet holds at most 1048576 rows.
 * @param user
 * @param contextRootPath
 * @param searchable
 */
@Async
public void exportExcel2007(final User user, final String contextRootPath, final Searchable searchable) {

    int rowAccessWindowSize = 1000; //rows kept in memory before SXSSF flushes them to disk
    int perSheetRows = 100000; //at most 100,000 rows per sheet
    int totalRows = 0;
    Long maxId = 0L; //largest id written so far, used as the paging cursor

    String fileName = generateFilename(user, contextRootPath, "xlsx");
    File file = new File(fileName);
    BufferedOutputStream out = null;
    SXSSFWorkbook wb = null;
    try {
        long beginTime = System.currentTimeMillis();

        wb = new SXSSFWorkbook(rowAccessWindowSize);
        wb.setCompressTempFiles(true); //gzip the temporary files

        while (true) {

            Sheet sheet = wb.createSheet();
            Row headerRow = sheet.createRow(0);
            Cell idHeaderCell = headerRow.createCell(0);
            idHeaderCell.setCellValue("?");
            Cell contentHeaderCell = headerRow.createCell(1);
            contentHeaderCell.setCellValue("");

            totalRows = 1;

            Page<ExcelData> page = null;

            do {
                searchable.setPage(0, pageSize);
                //unless an explicit id set is requested, page forward by id using maxId as the cursor
                if (!searchable.containsSearchKey("id_in")) {
                    searchable.addSearchFilter("id", SearchOperator.gt, maxId);
                }
                page = findAll(searchable);

                for (ExcelData data : page.getContent()) {
                    Row row = sheet.createRow(totalRows);
                    Cell idCell = row.createCell(0);
                    idCell.setCellValue(data.getId());
                    Cell contentCell = row.createCell(1);
                    contentCell.setCellValue(data.getContent());
                    maxId = Math.max(maxId, data.getId());
                    totalRows++;
                }
                //clear entity manager
                RepositoryHelper.clear();
            } while (page.hasNextPage() && totalRows <= perSheetRows);

            if (!page.hasNextPage()) {
                break;
            }
        }
        out = new BufferedOutputStream(new FileOutputStream(file));
        wb.write(out);

        IOUtils.closeQuietly(out);

        if (needCompress(file)) {
            fileName = compressAndDeleteOriginal(fileName);
        }

        long endTime = System.currentTimeMillis();

        Map<String, Object> context = Maps.newHashMap();
        context.put("seconds", (endTime - beginTime) / 1000);
        context.put("url", fileName.replace(contextRootPath, ""));
        notificationApi.notify(user.getId(), "excelExportSuccess", context);
    } catch (Exception e) {
        IOUtils.closeQuietly(out);
        log.error("excel export error", e);
        Map<String, Object> context = Maps.newHashMap();
        context.put("error", e.getMessage());
        notificationApi.notify(user.getId(), "excelExportError", context);
    } finally {
        //dispose of the temporary files backing the SXSSF workbook
        if (wb != null) {
            wb.dispose();
        }
    }
}
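
The export above keeps a bounded working set by reading the data page by page and advancing an id cursor (maxId) instead of requesting ever deeper page numbers. A stripped-down sketch of that loop, using hypothetical names (dataRepository, a derived findByIdGreaterThan query method, a writeRow helper) and leaving out the POI details, might look like this:

    long maxId = 0L;
    Page<ExcelData> page;
    do {
        Pageable pageable = new PageRequest(0, 1000);                // always request "page 0" of the remaining rows
        page = dataRepository.findByIdGreaterThan(maxId, pageable);  // hypothetical derived query returning Page<ExcelData>
        for (ExcelData data : page.getContent()) {
            writeRow(data);                                          // e.g. append one spreadsheet row
            maxId = Math.max(maxId, data.getId());                   // move the cursor past everything written
        }
    } while (page.hasNext());                                        // hasNextPage() in older Spring Data releases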

From source file:com.netflix.genie.core.jpa.services.JpaClusterServiceImplIntegrationTests.java

/**
 * Test the get clusters method with descending sort.
 */
@Test
public void testGetClustersDescending() {
    //Default to order by Updated
    final Page<Cluster> clusters = this.service.getClusters(null, null, null, null, null, PAGE);
    Assert.assertEquals(2, clusters.getNumberOfElements());
    Assert.assertEquals(CLUSTER_2_ID,
            clusters.getContent().get(0).getId().orElseThrow(IllegalArgumentException::new));
    Assert.assertEquals(CLUSTER_1_ID,
            clusters.getContent().get(1).getId().orElseThrow(IllegalArgumentException::new));
}

From source file:com.netflix.genie.core.jpa.services.JpaClusterServiceImplIntegrationTests.java

/**
 * Test the get clusters method ordered by user.
 */
@Test
public void testGetClustersOrderBysUser() {
    final Pageable userPage = new PageRequest(0, 10, Sort.Direction.DESC, "user");
    final Page<Cluster> clusters = this.service.getClusters(null, null, null, null, null, userPage);
    Assert.assertEquals(2, clusters.getNumberOfElements());
    Assert.assertEquals(CLUSTER_1_ID,
            clusters.getContent().get(0).getId().orElseThrow(IllegalArgumentException::new));
    Assert.assertEquals(CLUSTER_2_ID,
            clusters.getContent().get(1).getId().orElseThrow(IllegalArgumentException::new));
}

From source file:com.netflix.genie.core.jpa.services.JpaClusterServiceImplIntegrationTests.java

/**
 * Test that the get clusters method, when ordered by a collection field, falls back to the default sort (updated).
 */
@Ignore
@Test
public void testGetClustersOrderBysCollectionField() {
    final Pageable tagPage = new PageRequest(0, 10, Sort.Direction.DESC, "tags");
    final Page<Cluster> clusters = this.service.getClusters(null, null, null, null, null, tagPage);
    Assert.assertEquals(2, clusters.getNumberOfElements());
    Assert.assertEquals(CLUSTER_2_ID,
            clusters.getContent().get(0).getId().orElseThrow(IllegalArgumentException::new));
    Assert.assertEquals(CLUSTER_1_ID,
            clusters.getContent().get(1).getId().orElseThrow(IllegalArgumentException::new));
}

From source file:com.netflix.genie.core.jpa.services.JpaClusterServiceImplIntegrationTests.java

/**
 * Test the get clusters method filtered by statuses.
 */
@Test
public void testGetClustersByStatuses() {
    final Set<ClusterStatus> statuses = EnumSet.noneOf(ClusterStatus.class);
    statuses.add(ClusterStatus.UP);
    final Page<Cluster> clusters = this.service.getClusters(null, statuses, null, null, null, PAGE);
    Assert.assertEquals(2, clusters.getNumberOfElements());
    Assert.assertEquals(CLUSTER_2_ID,
            clusters.getContent().get(0).getId().orElseThrow(IllegalArgumentException::new));
    Assert.assertEquals(CLUSTER_1_ID,
            clusters.getContent().get(1).getId().orElseThrow(IllegalArgumentException::new));
}

From source file:com.daphne.es.showcase.excel.service.ExcelDataService.java

/**
 * Exports data to Excel 2003 (xls) workbooks, one sheet per workbook. The resulting
 * workbooks can be merged afterwards, for example:
 * 1. with a vbs script
 * 2. with a c# merge tool
 * @param user
 * @param contextRootPath
 * @param searchable
 */
@Async
public void exportExcel2003WithOneSheetPerWorkBook(final User user, final String contextRootPath,
        final Searchable searchable) {
    int workbookCount = 0;
    List<String> workbookFileNames = new ArrayList<String>();
    int perSheetRows = 60000; //at most 60,000 rows per sheet
    int totalRows = 0;
    String extension = "xls";

    int pageSize = 1000;
    Long maxId = 0L;

    BufferedOutputStream out = null;
    try {
        long beginTime = System.currentTimeMillis();

        while (true) {
            workbookCount++;
            String fileName = generateFilename(user, contextRootPath, workbookCount, extension);
            workbookFileNames.add(fileName);
            File file = new File(fileName);

            HSSFWorkbook wb = new HSSFWorkbook();
            Sheet sheet = wb.createSheet();
            Row headerRow = sheet.createRow(0);
            Cell idHeaderCell = headerRow.createCell(0);
            idHeaderCell.setCellValue("?");
            Cell contentHeaderCell = headerRow.createCell(1);
            contentHeaderCell.setCellValue("");

            totalRows = 1;

            Page<ExcelData> page = null;

            do {
                searchable.setPage(0, pageSize);
                //unless an explicit id set is requested, page forward by id using maxId as the cursor
                if (!searchable.containsSearchKey("id_in")) {
                    searchable.addSearchFilter("id", SearchOperator.gt, maxId);
                }
                page = findAll(searchable);

                for (ExcelData data : page.getContent()) {
                    Row row = sheet.createRow(totalRows);
                    Cell idCell = row.createCell(0);
                    idCell.setCellValue(data.getId());
                    Cell contentCell = row.createCell(1);
                    contentCell.setCellValue(data.getContent());
                    maxId = Math.max(maxId, data.getId());
                    totalRows++;
                }
                //clear entity manager
                RepositoryHelper.clear();
            } while (page.hasNext() && totalRows <= perSheetRows);

            out = new BufferedOutputStream(new FileOutputStream(file));
            wb.write(out);

            IOUtils.closeQuietly(out);

            if (!page.hasNext()) {
                break;
            }
        }

        String fileName = workbookFileNames.get(0);
        if (workbookCount > 1 || needCompress(new File(fileName))) {
            fileName = fileName.substring(0, fileName.lastIndexOf("_")) + ".zip";
            //compress all generated workbooks into a single zip and delete the originals
            compressAndDeleteOriginal(fileName, workbookFileNames.toArray(new String[0]));
        } else {
            String newFileName = fileName.substring(0, fileName.lastIndexOf("_")) + "." + extension;
            FileUtils.moveFile(new File(fileName), new File(newFileName));
            fileName = newFileName;
        }

        long endTime = System.currentTimeMillis();

        Map<String, Object> context = Maps.newHashMap();
        context.put("seconds", (endTime - beginTime) / 1000);
        context.put("url", fileName.replace(contextRootPath, ""));
        notificationApi.notify(user.getId(), "excelExportSuccess", context);
    } catch (Exception e) {
        e.printStackTrace();
        //make sure the output stream is closed even on failure
        IOUtils.closeQuietly(out);
        log.error("excel export error", e);
        Map<String, Object> context = Maps.newHashMap();
        context.put("error", e.getMessage());
        notificationApi.notify(user.getId(), "excelExportError", context);
    }
}

From source file:com.netflix.genie.core.jpa.services.JpaClusterServiceImplIntegrationTests.java

/**
 * Test the get clusters method filtered by name.
 */
@Test
public void testGetClustersByName() {
    final Page<Cluster> clusters = this.service.getClusters(CLUSTER_2_NAME, null, null, null, null, PAGE);
    Assert.assertEquals(1, clusters.getNumberOfElements());
    Assert.assertEquals(CLUSTER_2_ID,
            clusters.getContent().get(0).getId().orElseThrow(IllegalArgumentException::new));
}

From source file:com.luna.showcase.excel.service.ExcelDataService.java

/**
 * Exports data to Excel 2003 (xls) workbooks, one sheet per workbook. The resulting
 * workbooks can be merged afterwards, for example:
 * 1. with a vbs script
 * 2. with a c# merge tool
 * @param user
 * @param contextRootPath
 * @param searchable
 */
@Async
public void exportExcel2003WithOneSheetPerWorkBook(final User user, final String contextRootPath,
        final Searchable searchable) {
    int workbookCount = 0;
    List<String> workbookFileNames = new ArrayList<String>();
    int perSheetRows = 60000; //at most 60,000 rows per sheet
    int totalRows = 0;
    String extension = "xls";

    int pageSize = 1000;
    Long maxId = 0L;

    BufferedOutputStream out = null;
    try {
        long beginTime = System.currentTimeMillis();

        while (true) {
            workbookCount++;
            String fileName = generateFilename(user, contextRootPath, workbookCount, extension);
            workbookFileNames.add(fileName);
            File file = new File(fileName);

            HSSFWorkbook wb = new HSSFWorkbook();
            Sheet sheet = wb.createSheet();
            Row headerRow = sheet.createRow(0);
            Cell idHeaderCell = headerRow.createCell(0);
            idHeaderCell.setCellValue("?");
            Cell contentHeaderCell = headerRow.createCell(1);
            contentHeaderCell.setCellValue("");

            totalRows = 1;

            Page<ExcelData> page = null;

            do {
                searchable.setPage(0, pageSize);
                //unless an explicit id set is requested, page forward by id using maxId as the cursor
                if (!searchable.containsSearchKey("id_in")) {
                    searchable.addSearchFilter("id", SearchOperator.gt, maxId);
                }
                page = findAll(searchable);

                for (ExcelData data : page.getContent()) {
                    Row row = sheet.createRow(totalRows);
                    Cell idCell = row.createCell(0);
                    idCell.setCellValue(data.getId());
                    Cell contentCell = row.createCell(1);
                    contentCell.setCellValue(data.getContent());
                    maxId = Math.max(maxId, data.getId());
                    totalRows++;
                }
                //clear entity manager
                RepositoryHelper.clear();
            } while (page.hasNextPage() && totalRows <= perSheetRows);

            out = new BufferedOutputStream(new FileOutputStream(file));
            wb.write(out);

            IOUtils.closeQuietly(out);

            if (!page.hasNextPage()) {
                break;
            }
        }

        String fileName = workbookFileNames.get(0);
        if (workbookCount > 1 || needCompress(new File(fileName))) {
            fileName = fileName.substring(0, fileName.lastIndexOf("_")) + ".zip";
            //compress all generated workbooks into a single zip and delete the originals
            compressAndDeleteOriginal(fileName, workbookFileNames.toArray(new String[0]));
        } else {
            String newFileName = fileName.substring(0, fileName.lastIndexOf("_")) + "." + extension;
            FileUtils.moveFile(new File(fileName), new File(newFileName));
            fileName = newFileName;
        }

        long endTime = System.currentTimeMillis();

        Map<String, Object> context = Maps.newHashMap();
        context.put("seconds", (endTime - beginTime) / 1000);
        context.put("url", fileName.replace(contextRootPath, ""));
        notificationApi.notify(user.getId(), "excelExportSuccess", context);
    } catch (Exception e) {
        e.printStackTrace();
        //make sure the output stream is closed even on failure
        IOUtils.closeQuietly(out);
        log.error("excel export error", e);
        Map<String, Object> context = Maps.newHashMap();
        context.put("error", e.getMessage());
        notificationApi.notify(user.getId(), "excelExportError", context);
    }
}

From source file:com.netflix.genie.core.jpa.services.JpaClusterServiceImplIntegrationTests.java

/**
 * Test the get clusters method filtered by minimum update time.
 */
@Test
public void testGetClustersByMinUpdateTime() {
    final Calendar time = Calendar.getInstance();
    time.clear();
    time.set(2014, Calendar.JULY, 9, 2, 58, 59);
    final Page<Cluster> clusters = this.service.getClusters(null, null, null, time.getTime(), null, PAGE);
    Assert.assertEquals(1, clusters.getNumberOfElements());
    Assert.assertEquals(CLUSTER_2_ID,
            clusters.getContent().get(0).getId().orElseThrow(IllegalArgumentException::new));
}