Example usage for org.springframework.data.domain Slice getContent

Introduction

This page collects usage examples of the org.springframework.data.domain.Slice method getContent().

Prototype

List<T> getContent();

Document

Returns the page content as a List.
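
As a quick orientation before the real-world examples, here is a minimal sketch of the typical pattern: request a Slice, read its elements via getContent(), and advance with nextPageable() while hasNext() is true. Person and PersonRepository are hypothetical placeholders, not types from the examples below; note also that the older examples construct new PageRequest(...), while Spring Data 2.x code uses PageRequest.of(...).

import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.repository.Repository;

// Hypothetical entity, reduced to the single field the sketch needs.
class Person {
    String lastname;
}

// Hypothetical repository; Spring Data derives the query from the method name.
interface PersonRepository extends Repository<Person, Long> {
    Slice<Person> findByLastname(String lastname, Pageable pageable);
}

class SliceContentSketch {
    void printAll(PersonRepository repository) {
        Pageable pageable = PageRequest.of(0, 20);
        Slice<Person> slice;
        do {
            slice = repository.findByLastname("Smith", pageable);
            // getContent() exposes the elements of the current slice as a List.
            slice.getContent().forEach(person -> System.out.println(person.lastname));
            pageable = slice.nextPageable();
        } while (slice.hasNext());
    }
}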

Usage

From source file:at.plechinger.minigeocode.controller.GeocodeController.java

@RequestMapping
public GeocodeResult getGeocodingSingle(@RequestParam(value = "q", required = true) String query) {
    Slice<GeocodeResult> result = geocodeRepository.findSlice(query,
            new PageRequest(0, 1, Sort.Direction.ASC, "street", "housenumber"));

    if (result != null && result.getContent() != null && !result.getContent().isEmpty()) {
        return result.getContent().get(0);
    }
    return null;
}

From source file:at.plechinger.minigeocode.controller.ReverseGeocodeController.java

@RequestMapping
public ReverseGeocodeResult getSingleGeocoding(@RequestParam(value = "lat", required = true) double latitude,
        @RequestParam(value = "lon", required = true) double longitude) {

    Slice<ReverseGeocodeResult> result = geocodeRepository.findReverseSlice(longitude, latitude,
            new PageRequest(0, 1, Sort.Direction.ASC, "distance"));

    if (result != null && result.getContent() != null && !result.getContent().isEmpty()) {
        return result.getContent().get(0);
    }
    return null;
}
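
Both controllers request a single-element slice (page size 1), so getContent() holds at most one result; returning getContent().get(0), or null when the slice is empty, gives single-result semantics without fetching a full page.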

From source file:com.netflix.genie.core.jpa.services.JpaJobPersistenceServiceImpl.java

/**
 * {@inheritDoc}
 */
@Override
public long deleteBatchOfJobsCreatedBeforeDate(@NotNull final Date date, @Min(1) final int maxDeleted,
        @Min(1) final int pageSize) {
    log.info("Attempting to delete batch of jobs (at most {}) created before {} ms from epoch", maxDeleted,
            date.getTime());
    long jobExecutionsDeleted = 0;
    long jobMetadatasDeleted = 0;
    long jobsDeleted = 0;
    long jobRequestsDeleted = 0;
    long totalAttemptedDeletions = 0;
    final Pageable page = new PageRequest(0, pageSize);
    Slice<IdProjection> idProjections;
    do {
        idProjections = this.jobRequestRepo.findByCreatedBefore(date, page);
        if (idProjections.hasContent()) {
            final List<String> ids = idProjections.getContent().stream().map(IdProjection::getId)
                    .collect(Collectors.toList());

            final long toBeDeleted = ids.size();
            totalAttemptedDeletions += toBeDeleted;
            // Due to query optimizations these entity mappings aren't reversed, so cascade delete
            // isn't available and a runtime exception is thrown if you try to delete from the top.

            log.debug("Attempting to delete {} rows from job_executions...", toBeDeleted);
            final long deletedExecutions = this.jobExecutionRepo.deleteByIdIn(ids);
            log.debug("Successfully deleted {} rows from job_executions...", deletedExecutions);
            if (deletedExecutions != toBeDeleted) {
                log.error("Deleted {} job execution records but expected to delete {}", deletedExecutions,
                        toBeDeleted);
            }
            jobExecutionsDeleted += deletedExecutions;

            log.debug("Attempting to delete {} rows from job_metadata...", toBeDeleted);
            final long deletedMetadata = this.jobMetadataRepository.deleteByIdIn(ids);
            log.debug("Successfully deleted {} rows from job_metadata...", deletedMetadata);
            if (deletedMetadata != toBeDeleted) {
                log.error("Deleted {} job metadata records but expected to delete {}", deletedMetadata,
                        toBeDeleted);
            }
            jobMetadatasDeleted += deletedMetadata;

            log.debug("Attempting to delete {} rows from jobs...", toBeDeleted);
            final long deletedJobs = this.jobRepo.deleteByIdIn(ids);
            log.debug("Successfully deleted {} rows from jobs...", deletedJobs);
            if (deletedJobs != toBeDeleted) {
                log.error("Deleted {} job records but expected to delete {}", deletedJobs, toBeDeleted);
            }
            jobsDeleted += deletedJobs;

            log.debug("Attempting to delete {} rows from job_requests...", toBeDeleted);
            final long deletedJobRequests = this.jobRequestRepo.deleteByIdIn(ids);
            log.debug("Successfully deleted {} rows from job_requests...", deletedJobRequests);
            if (deletedJobRequests != toBeDeleted) {
                log.error("Deleted {} job request records but expected to delete {}", deletedJobRequests,
                        toBeDeleted);
            }
            jobRequestsDeleted += deletedJobRequests;
        }
    } while (idProjections.hasNext() && totalAttemptedDeletions < maxDeleted);

    log.info("Deleted a chunk of {} job records: {} execution, {} metadata, {} job and {} job request records",
            totalAttemptedDeletions, jobExecutionsDeleted, jobMetadatasDeleted, jobsDeleted,
            jobRequestsDeleted);
    return totalAttemptedDeletions;
}

From source file:com.netflix.genie.web.jpa.services.JpaJobPersistenceServiceImpl.java

/**
 * {@inheritDoc}
 */
@Override
public long deleteBatchOfJobsCreatedBeforeDate(@NotNull final Instant date, @Min(1) final int maxDeleted,
        @Min(1) final int pageSize) {
    log.info("Attempting to delete batch of jobs (at most {}) created before {} ms from epoch", maxDeleted,
            date.toEpochMilli());
    long jobsDeleted = 0;
    long totalAttemptedDeletions = 0;
    final Pageable page = PageRequest.of(0, pageSize);
    Slice<IdProjection> idProjections;
    do {
        idProjections = this.jobRepository.findByCreatedBefore(date, page);
        if (idProjections.hasContent()) {
            final List<Long> ids = idProjections.getContent().stream().map(IdProjection::getId)
                    .collect(Collectors.toList());

            final long toBeDeleted = ids.size();
            totalAttemptedDeletions += toBeDeleted;

            log.debug("Attempting to delete {} rows from jobs...", toBeDeleted);
            final long deletedJobs = this.jobRepository.deleteByIdIn(ids);
            log.debug("Successfully deleted {} rows from jobs...", deletedJobs);
            if (deletedJobs != toBeDeleted) {
                log.error("Deleted {} job records but expected to delete {}", deletedJobs, toBeDeleted);
            }
            jobsDeleted += deletedJobs;
        }
    } while (idProjections.hasNext() && totalAttemptedDeletions < maxDeleted);

    log.info("Deleted a chunk of {} job records: {} job", totalAttemptedDeletions, jobsDeleted);
    return totalAttemptedDeletions;
}
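
Both Genie variants follow the same batch pattern: fetch a Slice of ID projections, collect the IDs from getContent(), bulk-delete by ID, and loop while hasNext() signals more data and the deletion budget is not exhausted. A Slice is a good fit here because, unlike a Page, it does not issue an additional count query on every iteration.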

From source file:org.eclipse.hawkbit.mgmt.rest.resource.MgmtTargetResourceTest.java

@Test
@Description("Ensures that a post request for creating one target within a list works.")
public void createTargetsSingleEntryListReturnsSuccessful() throws Exception {
    final String knownName = "someName";
    final String knownControllerId = "controllerId1";
    final String knownDescription = "someDescription";
    final String createTargetsJson = getCreateTargetsListJsonString(knownControllerId, knownName,
            knownDescription);

    mvc.perform(post(MgmtRestConstants.TARGET_V1_REQUEST_MAPPING).content(createTargetsJson)
            .contentType(MediaType.APPLICATION_JSON)).andDo(MockMvcResultPrinter.print())
            .andExpect(status().is2xxSuccessful());

    final Slice<Target> findTargetsAll = targetManagement.findAll(PageRequest.of(0, 100));
    final Target target = findTargetsAll.getContent().get(0);
    assertThat(targetManagement.count()).isEqualTo(1);
    assertThat(target.getControllerId()).isEqualTo(knownControllerId);
    assertThat(target.getName()).isEqualTo(knownName);
    assertThat(target.getDescription()).isEqualTo(knownDescription);
}

From source file:org.eclipse.hawkbit.mgmt.rest.resource.MgmtTargetResourceTest.java

/**
 * helper method to create a target and start an action on it.
 *
 * @return the created target.
 */
private Target createTargetAndStartAction() {
    // prepare test
    final DistributionSet dsA = testdataFactory.createDistributionSet("");
    final Target tA = testdataFactory.createTarget("target-id-A");
    // assign a distribution set so we get an active update action
    assignDistributionSet(dsA, Arrays.asList(tA));
    // verify active action
    final Slice<Action> actionsByTarget = deploymentManagement.findActionsByTarget(tA.getControllerId(), PAGE);
    assertThat(actionsByTarget.getContent()).hasSize(1);
    return targetManagement.getByControllerID(tA.getControllerId()).get();
}

From source file:org.eclipse.hawkbit.repository.jpa.JpaRolloutManagement.java

private void setRolloutStatusDetails(final Slice<JpaRollout> rollouts) {
    final List<Long> rolloutIds = rollouts.getContent().stream().map(Rollout::getId)
            .collect(Collectors.toList());
    final Map<Long, List<TotalTargetCountActionStatus>> allStatesForRollout = getStatusCountItemForRollout(
            rolloutIds);

    if (allStatesForRollout != null) {
        rollouts.forEach(rollout -> {
            final TotalTargetCountStatus totalTargetCountStatus = new TotalTargetCountStatus(
                    allStatesForRollout.get(rollout.getId()), rollout.getTotalTargets());
            rollout.setTotalTargetCountStatus(totalTargetCountStatus);
        });
    }
}

From source file:org.eclipse.hawkbit.repository.jpa.JpaTargetManagement.java

private static Slice<Target> convertPage(final Slice<JpaTarget> findAll, final Pageable pageable) {
    return new PageImpl<>(Collections.unmodifiableList(findAll.getContent()), pageable, 0);
}
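
Note that the resulting PageImpl is constructed with a total of 0: the source Slice, unlike a Page, carries no total element count, so no accurate total is available to forward.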

From source file:org.eclipse.hawkbit.ui.rollout.rollout.RolloutBeanQuery.java

private static List<ProxyRollout> getProxyRolloutList(final Slice<Rollout> rolloutBeans) {
    return rolloutBeans.getContent().stream().map(RolloutBeanQuery::createProxy).collect(Collectors.toList());
}