Example usage for org.springframework.data.domain Page getContent

List of usage examples for org.springframework.data.domain Page getContent

Introduction

On this page you can find example usages of org.springframework.data.domain.Page.getContent().

Prototype

List<T> getContent();

Document

Returns the page content as a List.
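
Before the real-world examples below, here is a minimal sketch of the typical pattern: build a Pageable, run a repository query, then read the rows of the current page with getContent(). The TaskRepository and Task types are hypothetical and only illustrate the call; the snippet uses the same older PageRequest constructor that the examples below use.

private List<Task> loadFirstPage(final TaskRepository taskRepository) {
    // First page (index 0), 20 rows per page; a Sort could be passed as a third argument.
    final Pageable pageable = new PageRequest(0, 20);
    final Page<Task> page = taskRepository.findAll(pageable);

    // getContent() never returns null: it is an empty List when the query matched nothing.
    return page.getContent();
}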

Usage

From source file:org.eclipse.hawkbit.repository.jpa.JpaTargetManagement.java

private static Page<Target> convertPage(final Page<JpaTarget> findAll, final Pageable pageable) {
    return new PageImpl<>(Collections.unmodifiableList(findAll.getContent()), pageable,
            findAll.getTotalElements());
}

From source file:org.eclipse.hawkbit.repository.jpa.TargetManagementTest.java

private void validateFoundTargetsByRsql(final String rsqlFilter, final String... controllerIds) {
    final Page<Target> foundTargetsByMetadataAndControllerId = targetManagement.findByRsql(PAGE, rsqlFilter);

    assertThat(foundTargetsByMetadataAndControllerId.getTotalElements())
            .as("Targets count in RSQL filter is wrong").isEqualTo(controllerIds.length);
    assertThat(foundTargetsByMetadataAndControllerId.getContent().stream().map(Target::getControllerId))
            .as("Targets found by RSQL filter have wrong controller ids")
            .containsExactlyInAnyOrder(controllerIds);
}

From source file:org.eclipse.hawkbit.ui.artifacts.details.ArtifactBeanQuery.java

@Override
protected List<Artifact> loadBeans(final int startIndex, final int count) {
    Page<Artifact> artifactBeans;
    if (startIndex == 0 && firstPagetArtifacts != null) {
        artifactBeans = firstPagetArtifacts;
    } else {
        artifactBeans = getArtifactManagement()
                .findBySoftwareModule(new OffsetBasedPageRequest(startIndex, count, sort), baseSwModuleId);
    }

    return artifactBeans.getContent();
}

From source file:org.egov.api.controller.ComplaintController.java

/**
 * This will display the latest complaint except current user.
 *
 * @param page
 * @param pageSize
 * @return Complaint
 */

@RequestMapping(value = ApiUrl.COMPLAINT_LATEST, method = GET, produces = TEXT_PLAIN_VALUE)
public ResponseEntity<String> getLatest(@PathVariable("page") int page,
        @PathVariable("pageSize") int pageSize) {

    if (page < 1)
        return getResponseHandler().error(INVALID_PAGE_NUMBER_ERROR);
    try {
        final Page<Complaint> pagelist = complaintService.getLatest(page, pageSize);
        final boolean hasNextPage = pagelist.getTotalElements() > page * pageSize;
        return getResponseHandler().putStatusAttribute(HAS_NEXT_PAGE, String.valueOf(hasNextPage))
                .setDataAdapter(new ComplaintAdapter()).success(pagelist.getContent());
    } catch (final Exception e) {
        LOGGER.error(EGOV_API_ERROR, e);
        return getResponseHandler().error(getMessage(SERVER_ERROR));
    }

}

From source file:org.egov.api.controller.ComplaintController.java

/**
 * This will returns complaint list of current user.
 *
 * @param page
 * @param pageSize
 * @return Complaint
 */

@RequestMapping(value = ApiUrl.CITIZEN_GET_MY_COMPLAINT, method = GET, produces = TEXT_PLAIN_VALUE)
public ResponseEntity<String> getMyComplaint(@PathVariable("page") final int page,
        @PathVariable("pageSize") final int pageSize, @RequestParam(required = false) String complaintStatus) {

    if (page < 1)
        return getResponseHandler().error(INVALID_PAGE_NUMBER_ERROR);
    try {

        Page<Complaint> pagelist = null;
        boolean hasNextPage = false;
        if (isEmpty(complaintStatus) || complaintStatus.equals(PGRConstants.COMPLAINT_ALL)) {
            pagelist = complaintService.getMyComplaint(page, pageSize);
            hasNextPage = pagelist.getTotalElements() > page * pageSize;
        } else if (complaintStatus.equals(PGRConstants.COMPLAINT_PENDING)) {
            pagelist = complaintService.getMyPendingGrievances(page, pageSize);
            hasNextPage = pagelist.getTotalElements() > page * pageSize;
        } else if (complaintStatus.equals(PGRConstants.COMPLAINT_COMPLETED)) {
            pagelist = complaintService.getMyCompletedGrievances(page, pageSize);
            hasNextPage = pagelist.getTotalElements() > page * pageSize;
        } else if (complaintStatus.equals(PGRConstants.COMPLAINT_REJECTED)) {
            pagelist = complaintService.getMyRejectedGrievances(page, pageSize);
            hasNextPage = pagelist.getTotalElements() > page * pageSize;
        }

        if (pagelist == null)
            return getResponseHandler().error("Invalid Complaint Status!");
        else
            return getResponseHandler().putStatusAttribute(HAS_NEXT_PAGE, String.valueOf(hasNextPage))
                    .setDataAdapter(new ComplaintAdapter()).success(pagelist.getContent());

    } catch (final Exception e) {
        LOGGER.error(EGOV_API_ERROR, e);
        return getResponseHandler().error(getMessage(SERVER_ERROR));
    }

}

From source file:org.egov.pgr.web.controller.masters.escalationTime.ViewEscalationTimeController.java

public String commonSearchResult(final Integer pageNumber, final Integer pageSize, final Long complaintTypeId,
        final Long designationId) {

    final Page<Escalation> pageOfEscalation = escalationService.getPageOfEscalations(pageNumber, pageSize,
            complaintTypeId, designationId);
    final List<Escalation> positionList = pageOfEscalation.getContent();
    final StringBuilder complaintRouterJSONData = new StringBuilder();
    complaintRouterJSONData.append("{\"draw\": ").append("0");
    complaintRouterJSONData.append(",\"recordsTotal\":").append(pageOfEscalation.getTotalElements());
    complaintRouterJSONData.append(",\"totalDisplayRecords\":").append(pageSize);
    complaintRouterJSONData.append(",\"recordsFiltered\":").append(pageOfEscalation.getTotalElements());
    complaintRouterJSONData.append(",\"data\":").append(toJSON(positionList)).append("}");
    return complaintRouterJSONData.toString();
}

From source file:org.fao.geonet.OgpAppHandler.java

private void fillCaches(final ServiceContext context) {
    final Format formatService = context.getBean(Format.class); // this will initialize the formatter

    Thread fillCaches = new Thread(new Runnable() {
            @Override
        public void run() {
            final ServletContext servletContext = context.getServlet().getServletContext();
            context.setAsThreadLocal();
            ApplicationContextHolder.set(_applicationContext);
            GeonetWro4jFilter filter = (GeonetWro4jFilter) servletContext
                    .getAttribute(GeonetWro4jFilter.GEONET_WRO4J_FILTER_KEY);

            @SuppressWarnings("unchecked")
            List<String> wro4jUrls = _applicationContext.getBean("wro4jUrlsToInitialize", List.class);

            for (String wro4jUrl : wro4jUrls) {
                Log.info(Geonet.GEONETWORK, "Initializing the WRO4J group: " + wro4jUrl + " cache");
                final MockHttpServletRequest servletRequest = new MockHttpServletRequest(servletContext, "GET",
                        "/static/" + wro4jUrl);
                final MockHttpServletResponse response = new MockHttpServletResponse();
                try {
                    filter.doFilter(servletRequest, response, new MockFilterChain());
                } catch (Throwable t) {
                    Log.info(Geonet.GEONETWORK,
                            "Error while initializing the WRO4J group: " + wro4jUrl + " cache", t);
                }
            }

            final Page<Metadata> metadatas = _applicationContext.getBean(MetadataRepository.class)
                    .findAll(new PageRequest(0, 1));
            if (metadatas.getNumberOfElements() > 0) {
                Integer mdId = metadatas.getContent().get(0).getId();
                context.getUserSession().loginAs(
                        new User().setName("admin").setProfile(Profile.Administrator).setUsername("admin"));
                @SuppressWarnings("unchecked")
                List<String> formattersToInitialize = _applicationContext.getBean("formattersToInitialize",
                        List.class);

                for (String formatterName : formattersToInitialize) {
                    Log.info(Geonet.GEONETWORK, "Initializing the Formatter with id: " + formatterName);
                    final MockHttpServletRequest servletRequest = new MockHttpServletRequest(servletContext);
                    final MockHttpServletResponse response = new MockHttpServletResponse();
                    try {
                        formatService.exec("eng", FormatType.html.toString(), mdId.toString(), null,
                                formatterName, Boolean.TRUE.toString(), false, FormatterWidth._100,
                                new ServletWebRequest(servletRequest, response));
                    } catch (Throwable t) {
                        Log.info(Geonet.GEONETWORK,
                                "Error while initializing the Formatter with id: " + formatterName, t);
                    }
                }
            }
        }
    });
    fillCaches.setDaemon(true);
    fillCaches.setName("Fill Caches Thread");
    fillCaches.setPriority(Thread.MIN_PRIORITY);
    fillCaches.start();
}

From source file:org.jobscheduler.dashboard.web.rest.SchedulerHistoryResource.java

@RequestMapping("/schedulerHistories")
@ApiOperation(value = "Get list scheduler history")
public @ResponseBody ListDTO schedulerHistories(Model model, @RequestParam(value = "count") Integer count,
        @RequestParam(value = "page") Integer page, HttpServletRequest request)
        throws UnsupportedEncodingException {

    // Spring Data count from page 0, ngTable from page 1
    page--;
    Enumeration<String> parametersNames = request.getParameterNames();

    // Parameters
    String jobName = "%";
    String spoolerId = "%";
    BigDecimal error = null;

    DateTime startDT = DateTime.now().minusDays(100);
    DateTime endDT = DateTime.now();

    // For sorting
    List<Order> orders = new ArrayList<Order>();

    while (parametersNames.hasMoreElements()) {
        String parameterName = (String) parametersNames.nextElement();

        // Filtering
        if (parameterName.startsWith(Constant.PARAM_FILTER)) {
            String filter = request.getParameter(parameterName);
            String parameterFilter = parameterName.substring(parameterName.indexOf("[") + 1,
                    parameterName.indexOf("]"));
            String decodedFilter = URLDecoder.decode(filter, "UTF-8");
            if (parameterFilter.equals("jobName")) {
                jobName = "%" + decodedFilter + "%";
            }
            if (parameterFilter.equals("spoolerId")) {
                spoolerId = "%" + decodedFilter + "%";
            }
            if (parameterFilter.equals("error")) {
                error = new BigDecimal(filter);
            }
            if (parameterFilter.equals("startTime")) {
                startDT = DateTime.parse(decodedFilter, fmt);
            }
            if (parameterFilter.equals("endTime")) {
                endDT = DateTime.parse(decodedFilter, fmt);
            }

            log.info("Filter in get list history : " + parameterName + "=" + filter);
        }

        // Sorting
        if (parameterName.startsWith(Constant.PARAM_SORT)) {
            String directionParameter = request.getParameter(parameterName);
            String sortByColumnName = parameterName.substring(parameterName.indexOf("[") + 1,
                    parameterName.indexOf("]"));
            String direction = URLDecoder.decode(directionParameter, "UTF-8");
            orders.add(new Order(Direction.fromString(direction), sortByColumnName));
        }
    }

    PageRequest pageable;
    if (orders.size() == 0)
        pageable = new PageRequest(page, count);
    else
        pageable = new PageRequest(page, count, new Sort(orders));

    ListDTO dto = new ListDTO();

    Page<SchedulerHistory> scheduleJob;
    if (error != null)
        scheduleJob = schedulerHistoryRepository.findByStartTimeBetweenAndJobNameLikeAndSpoolerIdLikeAndError(
                new Timestamp(startDT.getMillis()), new Timestamp(endDT.getMillis()), jobName, spoolerId, error,
                pageable);

    else
        scheduleJob = schedulerHistoryRepository.findByStartTimeBetweenAndJobNameLikeAndSpoolerIdLike(
                new Timestamp(startDT.getMillis()), new Timestamp(endDT.getMillis()), jobName, spoolerId,
                pageable);

    dto.setResult(scheduleJob.getContent());
    dto.setTotalElements(scheduleJob.getTotalElements());
    dto.setTotalPages(scheduleJob.getTotalPages());

    return dto;
}

From source file:org.jtalks.jcommune.model.dao.search.hibernate.TopicHibernateSearchDaoTest.java

@Test
public void testSearchPaging() {
    int totalSize = 50;
    int pageCount = 2;
    int pageSize = totalSize / pageCount;
    String searchText = "JCommune";
    PageRequest pageRequest = new PageRequest("1", pageSize);
    List<Topic> topicList = PersistedObjectsFactory.createAndSaveTopicList(totalSize);
    for (Topic topic : topicList) {
        topic.setTitle(searchText);
    }

    saveAndFlushIndexes(topicList);
    configureMocks(searchText, searchText);

    Page<Topic> searchResultPage = topicSearchDao.searchByTitleAndContent(searchText, pageRequest,
            Arrays.asList(topicList.get(0).getBranch().getId()));

    assertEquals(searchResultPage.getContent().size(), pageSize, "Incorrect count of topics in one page.");
    assertEquals(searchResultPage.getTotalElements(), totalSize, "Incorrect total count.");
    assertEquals(searchResultPage.getTotalPages(), pageCount, "Incorrect count of pages.");

}

From source file:org.jtalks.jcommune.model.dao.search.hibernate.TopicHibernateSearchDaoTest.java

@Test(dataProvider = "parameterFullPhraseSearch")
public void testFullPhraseSearch(String content) {
    Topic expectedTopic = PersistedObjectsFactory.getDefaultTopic();
    expectedTopic.setTitle(content);

    saveAndFlushIndexes(Arrays.asList(expectedTopic));
    configureMocks(content, content);

    Page<Topic> searchResultPage = topicSearchDao.searchByTitleAndContent(content, DEFAULT_PAGE_REQUEST,
            Arrays.asList(expectedTopic.getBranch().getId()));

    Assert.assertTrue(searchResultPage.hasContent(), "Search result must not be empty.");
    for (Topic topic : searchResultPage.getContent()) {
        Assert.assertEquals(expectedTopic.getTitle(), topic.getTitle(),
                "Content from the index should be the same as in the database.");
    }
}