Usage examples for `org.springframework.data.domain.Page#getTotalPages()`:
int getTotalPages();
From source file:es.fdi.reservas.reserva.web.GestorController.java
/**
 * Lists deleted buildings ("edificios eliminados") filtered by address for the
 * current manager's faculty, one 5-element page at a time.
 *
 * @param direccion  address fragment used to filter deleted buildings
 * @param pageNumber 1-based page number taken from the URL
 * @param model      MVC model populated with the page and pagination window
 * @return the shared "index" view; the actual fragment is set via the "view" attribute
 */
@RequestMapping(value = "/gestor/administrar/edificios/eliminados/direccion/{direccion}/page/{pageNumber}", method = RequestMethod.GET)
public String gestiona_edificio_eliminado_direccion(@PathVariable String direccion, @PathVariable Integer pageNumber, Model model) {
    User u = gestor_service.getUsuarioActual();
    // Spring Data pages are 0-based; the URL exposes 1-based numbers.
    Pageable pageable = new PageRequest(pageNumber - 1, 5);
    Page<Edificio> currentResults = gestor_service.getEdificioDeletedByTagDireccionAndFacutadId(direccion,
            u.getFacultad().getId(), pageable);
    // FIX: the original dumped currentResults.getContent().get(0) to stdout unconditionally,
    // which throws IndexOutOfBoundsException whenever the page is empty. The leftover
    // System.out debug output has been removed.
    model.addAttribute("currentResults", currentResults);
    // Pagination window: up to 10 page links, starting at most 5 pages before the current one.
    int current = currentResults.getNumber() + 1;
    int begin = Math.max(1, current - 5);
    int end = Math.min(begin + 10, currentResults.getTotalPages());
    model.addAttribute("beginIndex", begin);
    model.addAttribute("endIndex", end);
    model.addAttribute("currentIndex", current);
    model.addAttribute("User", u);
    model.addAttribute("GruposReservas", gestor_service.getGrupoReservaByUserId(u.getId()));
    model.addAttribute("reservasPendientes",
            gestor_service.getReservasPendientes(u.getId(), EstadoReserva.PENDIENTE).size());
    model.addAttribute("view", "gestor/papelera_edificios");
    return "index";
}
From source file:uk.ac.ebi.ep.controller.BrowseTaxonomyController.java
/**
 * First (non-paginated) entry point for browsing enzymes by NCBI taxonomy id.
 * Loads page 0 of the enzyme list plus the facet lists (species, compounds,
 * diseases, EC families), seeds the shared SearchModel, caches the results in
 * the servlet context and resets the session search history.
 *
 * NOTE(review): the injected {@code pageable} argument is overwritten
 * immediately, so the caller-supplied paging information is ignored here.
 *
 * @param searchModel shared search form backing object
 * @param entryID     taxonomy id to browse (required)
 * @param entryName   display name of the organism (optional, echoed to the view)
 * @return the shared RESULT view name
 */
@RequestMapping(value = SEARCH_BY_TAX_ID, method = RequestMethod.GET)
public String searchByTaxId(@ModelAttribute("searchModel") SearchModel searchModel,
        @RequestParam(value = "entryid", required = true) Long entryID,
        @RequestParam(value = "entryname", required = false) String entryName, Model model,
        HttpServletRequest request, HttpSession session, Pageable pageable, RedirectAttributes attributes) {
    // Always start at page 0, sorted by function then entry type.
    pageable = new PageRequest(0, SEARCH_PAGESIZE, Sort.Direction.ASC, "function", "entryType");
    Page<UniprotEntry> page = this.enzymePortalService.findEnzymesByTaxonomy(entryID, pageable);
    // Facet data for the left-hand filter panel.
    List<Species> species = enzymePortalService.findSpeciesByTaxId(entryID);
    List<Compound> compouds = enzymePortalService.findCompoundsByTaxId(entryID);
    List<Disease> diseases = enzymePortalService.findDiseasesByTaxId(entryID);
    List<EcNumber> enzymeFamilies = enzymePortalService.findEnzymeFamiliesByTaxId(entryID);
    // Seed the search parameters so subsequent keyword searches start from this state.
    SearchParams searchParams = searchModel.getSearchparams();
    searchParams.setStart(0);
    searchParams.setType(SearchParams.SearchType.KEYWORD);
    searchParams.setText(entryName);
    searchParams.setSize(SEARCH_PAGESIZE);
    searchModel.setSearchparams(searchParams);
    List<UniprotEntry> result = page.getContent();
    // Pagination window: up to 10 page links, starting at most 5 pages before current.
    int current = page.getNumber() + 1;
    int begin = Math.max(1, current - 5);
    int end = Math.min(begin + 10, page.getTotalPages());
    model.addAttribute("page", page);
    model.addAttribute("beginIndex", begin);
    model.addAttribute("endIndex", end);
    model.addAttribute("currentIndex", current);
    model.addAttribute("organismName", entryName);
    model.addAttribute("taxId", entryID);
    // model.addAttribute("summaryEntries", result);
    // Assemble the result object shared with the generic search views.
    SearchResults searchResults = new SearchResults();
    searchResults.setTotalfound(page.getTotalElements());
    SearchFilters filters = new SearchFilters();
    filters.setSpecies(species);
    filters.setCompounds(compouds);
    filters.setEcNumbers(enzymeFamilies);
    filters.setDiseases(diseases);
    searchResults.setSearchfilters(filters);
    searchResults.setSummaryentries(result);
    searchModel.setSearchresults(searchResults);
    model.addAttribute("searchModel", searchModel);
    model.addAttribute("searchConfig", searchConfig);
    model.addAttribute("searchFilter", filters);
    // Cache under a key derived from the search params and reset the session history.
    String searchKey = getSearchKey(searchModel.getSearchparams());
    cacheSearch(session.getServletContext(), searchKey, searchResults);
    setLastSummaries(session, searchResults.getSummaryentries());
    clearHistory(session);
    addToHistory(session, searchModel.getSearchparams().getType(), searchKey);
    return RESULT;
}
From source file:uk.ac.ebi.ep.controller.BrowseTaxonomyController.java
/**
 * Paginated variant of {@code searchByTaxId}: same facet loading, model
 * population, caching and history handling, but for an explicit 1-based
 * page number taken from the URL (clamped to a minimum of 1).
 *
 * @param pageNumber 1-based page number from the URL; values below 1 are clamped to 1
 * @param entryID    taxonomy id to browse (required)
 * @param entryName  display name of the organism (optional, echoed to the view)
 * @return the shared RESULT view name
 */
@RequestMapping(value = SEARCH_BY_TAX_ID + "/page={pageNumber}", method = RequestMethod.GET)
public String searchByTaxIdPaginated(@PathVariable Integer pageNumber,
        @ModelAttribute("searchModel") SearchModel searchModel,
        @RequestParam(value = "entryid", required = true) Long entryID,
        @RequestParam(value = "entryname", required = false) String entryName, Model model, HttpSession session,
        RedirectAttributes attributes) {
    // Guard against page=0 or negative page numbers in the URL.
    if (pageNumber < 1) {
        pageNumber = 1;
    }
    // Spring Data pages are 0-based; sort by function then entry type.
    Pageable pageable = new PageRequest(pageNumber - 1, SEARCH_PAGESIZE, Sort.Direction.ASC, "function",
            "entryType");
    Page<UniprotEntry> page = this.enzymePortalService.findEnzymesByTaxonomy(entryID, pageable);
    // Facet data for the left-hand filter panel.
    List<Species> species = enzymePortalService.findSpeciesByTaxId(entryID);
    List<Compound> compouds = enzymePortalService.findCompoundsByTaxId(entryID);
    List<Disease> diseases = enzymePortalService.findDiseasesByTaxId(entryID);
    List<EcNumber> enzymeFamilies = enzymePortalService.findEnzymeFamiliesByTaxId(entryID);
    // Seed the search parameters so subsequent keyword searches start from this state.
    SearchParams searchParams = searchModel.getSearchparams();
    searchParams.setStart(0);
    searchParams.setType(SearchParams.SearchType.KEYWORD);
    searchParams.setText(entryName);
    searchParams.setSize(SEARCH_PAGESIZE);
    searchModel.setSearchparams(searchParams);
    List<UniprotEntry> result = page.getContent();
    // Pagination window: up to 10 page links, starting at most 5 pages before current.
    int current = page.getNumber() + 1;
    int begin = Math.max(1, current - 5);
    int end = Math.min(begin + 10, page.getTotalPages());
    model.addAttribute("page", page);
    model.addAttribute("beginIndex", begin);
    model.addAttribute("endIndex", end);
    model.addAttribute("currentIndex", current);
    model.addAttribute("organismName", entryName);
    model.addAttribute("taxId", entryID);
    // model.addAttribute("summaryEntries", result);
    // Assemble the result object shared with the generic search views.
    SearchResults searchResults = new SearchResults();
    searchResults.setTotalfound(page.getTotalElements());
    SearchFilters filters = new SearchFilters();
    filters.setSpecies(species);
    filters.setCompounds(compouds);
    filters.setEcNumbers(enzymeFamilies);
    filters.setDiseases(diseases);
    searchResults.setSearchfilters(filters);
    searchResults.setSummaryentries(result);
    searchModel.setSearchresults(searchResults);
    model.addAttribute("searchModel", searchModel);
    model.addAttribute("searchConfig", searchConfig);
    model.addAttribute("searchFilter", filters);
    // Cache under a key derived from the search params and reset the session history.
    String searchKey = getSearchKey(searchModel.getSearchparams());
    cacheSearch(session.getServletContext(), searchKey, searchResults);
    setLastSummaries(session, searchResults.getSummaryentries());
    clearHistory(session);
    addToHistory(session, searchModel.getSearchparams().getType(), searchKey);
    return RESULT;
}
From source file:uk.ac.ebi.ep.controller.BrowseTaxonomyController.java
/**
 * Applies the selected facet filters (species, compounds, diseases, EC
 * families) to the taxonomy browse results. Rebuilds the facet lists, marks
 * the user's selections, dispatches to the service method matching the
 * combination of non-empty filters, and repopulates model/cache/history the
 * same way as the browse handlers.
 *
 * NOTE(review): the filter-combination dispatch below is a sequence of
 * independent (non-else) ifs evaluated in order; a later matching condition
 * overwrites the page computed by an earlier one. Order is significant —
 * do not reorder.
 *
 * @param taxId        taxonomy id being browsed (required)
 * @param organismName organism display name (optional, echoed to the view)
 * @return the shared RESULT view name
 */
@RequestMapping(value = FILTER_BY_FACETS, method = RequestMethod.GET)
public String filterByFacets(@ModelAttribute("searchModel") SearchModel searchModel,
        @RequestParam(value = "taxId", required = true) Long taxId,
        @RequestParam(value = "organismName", required = false) String organismName, Model model,
        HttpServletRequest request, HttpSession session, RedirectAttributes attributes) {
    // Rebuild the facet lists for this taxonomy.
    List<Species> species = enzymePortalService.findSpeciesByTaxId(taxId);
    List<Compound> compouds = enzymePortalService.findCompoundsByTaxId(taxId);
    List<Disease> diseases = enzymePortalService.findDiseasesByTaxId(taxId);
    List<EcNumber> enzymeFamilies = enzymePortalService.findEnzymeFamiliesByTaxId(taxId);
    SearchFilters filters = new SearchFilters();
    filters.setSpecies(species);
    filters.setCompounds(compouds);
    filters.setDiseases(diseases);
    filters.setEcNumbers(enzymeFamilies);
    SearchParams searchParams = searchModel.getSearchparams();
    searchParams.setText(organismName);
    searchParams.setSize(SEARCH_PAGESIZE);
    searchModel.setSearchparams(searchParams);
    SearchResults searchResults = new SearchResults();
    searchResults.setSearchfilters(filters);
    searchModel.setSearchresults(searchResults);
    SearchParams searchParameters = searchModel.getSearchparams();
    // Raw request parameters written by the autocomplete widgets (different JS
    // than the plain checkbox filters, hence the separate parameter names).
    String compound_autocompleteFilter = request.getParameter("searchparams.compounds");
    String specie_autocompleteFilter = request.getParameter("_ctempList_selected");
    String diseases_autocompleteFilter = request.getParameter("_DtempList_selected");
    // Currently selected filter values bound from the form.
    List<String> specieFilter = searchParameters.getSpecies();
    List<String> compoundFilter = searchParameters.getCompounds();
    List<String> diseaseFilter = searchParameters.getDiseases();
    List<Integer> ecFilter = searchParameters.getEcFamilies();
    // Remove empty strings the form may submit; an empty entry would make the
    // "is this filter active?" checks below misbehave.
    if (specieFilter.contains("")) {
        specieFilter.remove("");
    }
    if (compoundFilter.contains("")) {
        compoundFilter.remove("");
    }
    if (diseaseFilter.contains("")) {
        diseaseFilter.remove("");
    }
    // Workaround: autocomplete selections arrive outside the bound lists, so fold
    // them in — but only when no other autocomplete field is in play.
    if ((specie_autocompleteFilter != null && StringUtils.hasLength(specie_autocompleteFilter) == true)
            && StringUtils.isEmpty(compound_autocompleteFilter)
            && StringUtils.isEmpty(diseases_autocompleteFilter)) {
        specieFilter.add(specie_autocompleteFilter);
    }
    if ((diseases_autocompleteFilter != null && StringUtils.hasLength(diseases_autocompleteFilter) == true)
            && StringUtils.isEmpty(compound_autocompleteFilter)
            && StringUtils.isEmpty(specie_autocompleteFilter)) {
        diseaseFilter.add(diseases_autocompleteFilter);
    }
    // Mark the selected entries in each facet list so the view renders them on
    // top / checked; reset first so deselected items return to the plain list.
    List<Species> defaultSpeciesList = searchResults.getSearchfilters().getSpecies();
    resetSelectedSpecies(defaultSpeciesList);
    searchParameters.getSpecies().stream().forEach((selectedItems) -> {
        defaultSpeciesList.stream()
                .filter((theSpecies) -> (selectedItems.equals(theSpecies.getScientificname())))
                .forEach((theSpecies) -> {
                    theSpecies.setSelected(true);
                });
    });
    List<Compound> defaultCompoundList = searchResults.getSearchfilters().getCompounds();
    resetSelectedCompounds(defaultCompoundList);
    searchParameters.getCompounds().stream().forEach((SelectedCompounds) -> {
        defaultCompoundList.stream().filter((theCompound) -> (SelectedCompounds.equals(theCompound.getName())))
                .forEach((theCompound) -> {
                    theCompound.setSelected(true);
                });
    });
    List<Disease> defaultDiseaseList = searchResults.getSearchfilters().getDiseases();
    resetSelectedDisease(defaultDiseaseList);
    searchParameters.getDiseases().stream().forEach((selectedDisease) -> {
        defaultDiseaseList.stream().filter((disease) -> (selectedDisease.equals(disease.getName())))
                .forEach((disease) -> {
                    disease.setSelected(true);
                });
    });
    List<EcNumber> defaultEcNumberList = searchResults.getSearchfilters().getEcNumbers();
    resetSelectedEcNumber(defaultEcNumberList);
    searchParameters.getEcFamilies().stream().forEach((selectedEcFamily) -> {
        defaultEcNumberList.stream().filter((ec) -> (selectedEcFamily.equals(ec.getEc()))).forEach((ec) -> {
            ec.setSelected(true);
        });
    });
    Pageable pageable = new PageRequest(0, SEARCH_PAGESIZE, Sort.Direction.ASC, "function", "entryType");
    // Default to an empty page; overwritten by whichever dispatch branch matches last.
    Page<UniprotEntry> page = new PageImpl<>(new ArrayList<>(), pageable, 0);
    // no filters at all
    if (specieFilter.isEmpty() && compoundFilter.isEmpty() && diseaseFilter.isEmpty()) {
        page = enzymePortalService.filterBySpecie(taxId, pageable);
    }
    // specie only
    if (!specieFilter.isEmpty() && compoundFilter.isEmpty() && diseaseFilter.isEmpty()) {
        page = enzymePortalService.filterBySpecie(taxId, pageable);
    }
    // compounds only
    if (!compoundFilter.isEmpty() && diseaseFilter.isEmpty()) {
        page = enzymePortalService.filterBySpecieAndCompounds(taxId, compoundFilter, pageable);
    }
    // disease only
    if (compoundFilter.isEmpty() && !diseaseFilter.isEmpty()) {
        page = enzymePortalService.filterBySpecieAndDiseases(taxId, diseaseFilter, pageable);
    }
    // ec only
    if (compoundFilter.isEmpty() && diseaseFilter.isEmpty() && !ecFilter.isEmpty()) {
        page = enzymePortalService.filterBySpecieAndEc(taxId, ecFilter, pageable);
    }
    // compound and diseases
    if (!compoundFilter.isEmpty() && !diseaseFilter.isEmpty() && ecFilter.isEmpty()) {
        page = enzymePortalService.filterBySpecieAndCompoundsAndDiseases(taxId, compoundFilter, diseaseFilter,
                pageable);
    }
    // compound and ec
    if (!compoundFilter.isEmpty() && !ecFilter.isEmpty() && diseaseFilter.isEmpty()) {
        page = enzymePortalService.filterBySpecieAndCompoundsAndEc(taxId, compoundFilter, ecFilter, pageable);
    }
    // disease and ec
    if (!ecFilter.isEmpty() && !diseaseFilter.isEmpty() && compoundFilter.isEmpty()) {
        page = enzymePortalService.filterBySpecieAndDiseasesAndEc(taxId, diseaseFilter, ecFilter, pageable);
    }
    // disease and compounds and ec
    if (!ecFilter.isEmpty() && !diseaseFilter.isEmpty() && !compoundFilter.isEmpty()) {
        page = enzymePortalService.filterBySpecieAndCompoundsAndDiseasesAndEc(taxId, compoundFilter,
                diseaseFilter, ecFilter, pageable);
    }
    model.addAttribute("searchFilter", filters);
    List<UniprotEntry> result = page.getContent();
    // Pagination window: up to 10 page links, starting at most 5 pages before current.
    int current = page.getNumber() + 1;
    int begin = Math.max(1, current - 5);
    int end = Math.min(begin + 10, page.getTotalPages());
    model.addAttribute("page", page);
    model.addAttribute("beginIndex", begin);
    model.addAttribute("endIndex", end);
    model.addAttribute("currentIndex", current);
    model.addAttribute("organismName", organismName);
    model.addAttribute("taxId", taxId);
    model.addAttribute("summaryEntries", result);
    searchResults.setTotalfound(page.getTotalElements());
    searchResults.setSearchfilters(filters);
    searchResults.setSummaryentries(result);
    searchModel.setSearchresults(searchResults);
    model.addAttribute("searchModel", searchModel);
    model.addAttribute("searchConfig", searchConfig);
    // Cache under a key derived from the search params and reset the session history.
    String searchKey = getSearchKey(searchModel.getSearchparams());
    cacheSearch(session.getServletContext(), searchKey, searchResults);
    setLastSummaries(session, searchResults.getSummaryentries());
    clearHistory(session);
    addToHistory(session, searchModel.getSearchparams().getType(), searchKey);
    return RESULT;
}
From source file:org.jobscheduler.dashboard.web.rest.SchedulerHistoryResource.java
/**
 * REST endpoint returning a page of scheduler history entries for an ngTable
 * client. Parses ngTable-style request parameters (filter[...] and
 * sorting[...]) into repository query arguments and a Sort, then queries by
 * date range, job name, spooler id and (optionally) error code.
 *
 * @param count page size requested by the client
 * @param page  1-based page number from ngTable (converted to Spring Data's 0-based index)
 * @return DTO with the page content, total element count and total page count
 * @throws UnsupportedEncodingException if a filter/sort parameter cannot be URL-decoded
 */
@RequestMapping("/schedulerHistories")
@ApiOperation(value = "Get list scheduler history")
public @ResponseBody ListDTO schedulerHistories(Model model, @RequestParam(value = "count") Integer count,
        @RequestParam(value = "page") Integer page, HttpServletRequest request)
        throws UnsupportedEncodingException {
    // Spring Data count from page 0, ngTable from page 1
    page--;
    Enumeration<String> parametersNames = request.getParameterNames();
    // Filter defaults: match-all wildcards, last 100 days, no error filter.
    String jobName = "%";
    String spoolerId = "%";
    BigDecimal error = null;
    DateTime startDT = DateTime.now().minusDays(100);
    DateTime endDT = DateTime.now();
    // Sort orders accumulated from sorting[...] parameters.
    List<Order> orders = new ArrayList<Order>();
    while (parametersNames.hasMoreElements()) {
        String parameterName = (String) parametersNames.nextElement();
        // Filtering parameters look like filter[columnName]=value.
        if (parameterName.startsWith(Constant.PARAM_FILTER)) {
            String filter = request.getParameter(parameterName);
            // Extract the column name between the brackets.
            String parameterFilter = parameterName.substring(parameterName.indexOf("[") + 1,
                    parameterName.indexOf("]"));
            String decodedFilter = URLDecoder.decode(filter, "UTF-8");
            if (parameterFilter.equals("jobName")) {
                jobName = "%" + decodedFilter + "%";
            }
            if (parameterFilter.equals("spoolerId")) {
                spoolerId = "%" + decodedFilter + "%";
            }
            if (parameterFilter.equals("error")) {
                // NOTE(review): uses the raw (undecoded) value here, unlike the
                // other filters — confirm whether that is intentional.
                error = new BigDecimal(filter);
            }
            if (parameterFilter.equals("startTime")) {
                startDT = DateTime.parse(decodedFilter, fmt);
            }
            if (parameterFilter.equals("endTime")) {
                endDT = DateTime.parse(decodedFilter, fmt);
            }
            log.info("Filter in get list history : " + parameterName + "=" + filter);
        }
        // Sorting parameters look like sorting[columnName]=asc|desc.
        if (parameterName.startsWith(Constant.PARAM_SORT)) {
            String directionParameter = request.getParameter(parameterName);
            String sortByColumnName = parameterName.substring(parameterName.indexOf("[") + 1,
                    parameterName.indexOf("]"));
            String direction = URLDecoder.decode(directionParameter, "UTF-8");
            orders.add(new Order(Direction.fromString(direction), sortByColumnName));
        }
    }
    PageRequest pageable;
    if (orders.size() == 0)
        pageable = new PageRequest(page, count);
    else
        pageable = new PageRequest(page, count, new Sort(orders));
    ListDTO dto = new ListDTO();
    Page<SchedulerHistory> scheduleJob;
    // Query variant with or without the error-code filter.
    if (error != null)
        scheduleJob = schedulerHistoryRepository.findByStartTimeBetweenAndJobNameLikeAndSpoolerIdLikeAndError(
                new Timestamp(startDT.getMillis()), new Timestamp(endDT.getMillis()), jobName, spoolerId, error,
                pageable);
    else
        scheduleJob = schedulerHistoryRepository.findByStartTimeBetweenAndJobNameLikeAndSpoolerIdLike(
                new Timestamp(startDT.getMillis()), new Timestamp(endDT.getMillis()), jobName, spoolerId,
                pageable);
    dto.setResult(scheduleJob.getContent());
    dto.setTotalElements(scheduleJob.getTotalElements());
    dto.setTotalPages(scheduleJob.getTotalPages());
    return dto;
}
From source file:org.jtalks.jcommune.model.dao.search.hibernate.TopicHibernateSearchDao.java
/** * Checks if this search was by made with too big page number specified * * @param searchResults search results/*from w w w . j a va 2 s . c om*/ * @return true if page number is too big */ private boolean isSearchedAboveLastPage(Page<Topic> searchResults) { return !searchResults.hasContent() && searchResults.getNumber() > searchResults.getTotalPages(); }
From source file:org.jtalks.jcommune.model.dao.search.hibernate.TopicHibernateSearchDaoTest.java
/**
 * Verifies paging of full-text topic search: 50 topics indexed with the same
 * title, page size 25, requesting one page must yield 25 topics, a total of
 * 50 elements and 2 pages.
 */
@Test
public void testSearchPaging() {
    int totalSize = 50;
    int pageCount = 2;
    int pageSize = totalSize / pageCount;
    String searchText = "JCommune";
    // FIX: the original passed the String "1" to PageRequest(int, int), which
    // does not compile; the page index must be the int 1 (second 0-based page,
    // still a full page of 25 out of 50 results).
    PageRequest pageRequest = new PageRequest(1, pageSize);
    List<Topic> topicList = PersistedObjectsFactory.createAndSaveTopicList(totalSize);
    // Make every topic match the search text.
    for (Topic topic : topicList) {
        topic.setTitle(searchText);
    }
    saveAndFlushIndexes(topicList);
    configureMocks(searchText, searchText);

    Page<Topic> searchResultPage = topicSearchDao.searchByTitleAndContent(searchText, pageRequest,
            Arrays.asList(topicList.get(0).getBranch().getId()));

    assertEquals(searchResultPage.getContent().size(), pageSize, "Incorrect count of topics in one page.");
    assertEquals(searchResultPage.getTotalElements(), totalSize, "Incorrect total count.");
    assertEquals(searchResultPage.getTotalPages(), pageCount, "Incorrect count of pages.");
}
From source file:org.lareferencia.backend.indexer.IndexerImpl.java
public synchronized boolean index(NetworkSnapshot snapshot) { try {/*from ww w.j a v a2s .com*/ // Borrado de los docs del pas del snapshot MessageDigest md = MessageDigest.getInstance("MD5"); String countryISO = snapshot.getNetwork().getCountryISO(); this.sendUpdateToSolr( "<delete><query>country_iso:" + snapshot.getNetwork().getCountryISO() + "</query></delete>"); // Update de los registros de a PAGE_SIZE Page<OAIRecord> page = recordRepository.findBySnapshotIdAndStatus(snapshot.getId(), RecordStatus.VALID, new PageRequest(0, PAGE_SIZE)); int totalPages = page.getTotalPages(); Long lastId = -1L; // Aqu se guarda el ultimo id de cada pgina para ser usado en el la query optimizada for (int i = 0; i < totalPages; i++) { Transformer trf = buildTransformer(); trf.setParameter("country_iso", countryISO); trf.setParameter("country", snapshot.getNetwork().getName()); //page = recordRepository.findBySnapshotIdAndStatusLimited(snapshot.getId(), RecordStatus.VALID, lastId, new PageRequest(0, PAGE_SIZE) ); page = recordRepository.findBySnapshotIdAndStatus(snapshot.getId(), RecordStatus.VALID, new PageRequest(i, PAGE_SIZE)); System.out.println("Indexando Snapshot: " + snapshot.getId() + " de: " + snapshot.getNetwork().getName() + " pgina: " + i + " de: " + totalPages); StringBuffer strBuf = new StringBuffer(); List<OAIRecord> records = page.getContent(); for (OAIRecord record : records) { OAIRecordMetadata domRecord = new OAIRecordMetadata(record.getIdentifier(), record.getPublishedXML()); StringWriter stringWritter = new StringWriter(); Result output = new StreamResult(stringWritter); // id unico pero mutable para solr trf.setParameter("solr_id", countryISO + "_" + snapshot.getId().toString() + "_" + record.getId().toString()); // id permantente para vufind trf.setParameter("vufind_id", countryISO + "_" + DigestUtils.md5Hex(record.getPublishedXML())); // header id para staff trf.setParameter("header_id", record.getIdentifier()); // Se transforma y genera el string del 
registro trf.transform(new DOMSource(domRecord.getDOMDocument()), output); strBuf.append(stringWritter.toString()); // Se actualiza el lastID para permitir la paginacin con offset 0 //lastId = records.get( records.size()-1 ).getId(); } this.sendUpdateToSolr("<add>" + strBuf.toString() + "</add>"); trf = null; page = null; strBuf = null; } // commit de los cambios this.sendUpdateToSolr("<commit/>"); } catch (Exception e) { e.printStackTrace(); try { this.sendUpdateToSolr("<rollback/>"); } catch (SolrServerException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } return false; } return true; }
From source file:org.lareferencia.backend.indexer.IntelligoIndexer.java
public synchronized boolean index(NetworkSnapshot snapshot) { try {//from w w w . j av a 2 s . c o m // Borrado de los docs del pas del snapshot Page<OAIRecord> page = recordRepository.findBySnapshotIdAndStatus(snapshot.getId(), RecordStatus.VALID, new PageRequest(0, PAGE_SIZE)); int totalPages = page.getTotalPages(); String filepath = outputPath + "/" + snapshot.getNetwork().getName().replace(" ", "_") + "_" + snapshot.getId(); boolean success = (new File(filepath)).mkdirs(); if (!success) { // Directory creation failed } for (int i = 0; i < totalPages; i++) { Transformer trf = buildTransformer(); //trf.setParameter("country", snapshot.getNetwork().getName() ); page = recordRepository.findBySnapshotIdAndStatus(snapshot.getId(), RecordStatus.VALID, new PageRequest(i, PAGE_SIZE)); System.out.println("Indexando Snapshot: " + snapshot.getId() + " de: " + snapshot.getNetwork().getName() + " pgina: " + i + " de: " + totalPages); StringBuffer strBuf = new StringBuffer(); List<OAIRecord> records = page.getContent(); strBuf.append("<add>"); for (OAIRecord record : records) { OAIRecordMetadata metadata = new OAIRecordMetadata(record.getIdentifier(), record.getPublishedXML()); StringWriter stringWritter = new StringWriter(); Result output = new StreamResult(stringWritter); // id unico pero mutable para solr trf.setParameter("solr_id", snapshot.getId() + "." 
+ record.getId().toString()); /////// DC:DESCRIPTION - Deteccin y divisin de idiomas String ab_es = ""; String ab_en = ""; String ab_pt = ""; //System.out.println( metadata.getFieldOcurrences("dc:description") ); for (String ab : metadata.getFieldOcurrences("dc:description")) { String lang = detectLang(ab); switch (lang) { case "es": ab_es += ab; break; case "en": ab_en += ab; break; case "pt": ab_pt += ab; break; } } trf.setParameter("ab_es", ab_es); trf.setParameter("ab_en", ab_en); trf.setParameter("ab_pt", ab_pt); ///////////////////////////////////////////////////////////// /////// DC:title - Deteccin y divisin de idiomas String ti_es = ""; String ti_en = ""; String ti_pt = ""; for (String ti : metadata.getFieldOcurrences("dc:title")) { String lang = detectLang(ti); switch (lang) { case "es": ti_es += ti; break; case "en": ti_en += ti; break; case "pt": ti_pt += ti; break; } } trf.setParameter("ti_es", ti_es); trf.setParameter("ti_en", ti_en); trf.setParameter("ti_pt", ti_pt); ///////////////////////////////////////////////////////////// // Se transforma y genera el string del registro trf.transform(new DOMSource(metadata.getDOMDocument()), output); strBuf.append(stringWritter.toString()); } strBuf.append("</add>"); BufferedWriter out = new BufferedWriter(new FileWriter(filepath + "/" + i + ".solr.xml")); out.write(strBuf.toString()); out.close(); } } catch (Exception e) { e.printStackTrace(); return false; } return true; }
From source file:org.lareferencia.provider.providers.LaReferenciaProvider.java
/**
 * OAI-PMH ListRecords implementation backed by the latest good snapshot of
 * each national network. On the first call for a resumption sequence, builds
 * the per-snapshot page plan (snapshot ids and their total page counts) and
 * stores it in {@code state}; every call then serves the state's current
 * page and advances the state.
 *
 * @param set             OAI set: a country ISO code, the driver set name, or null for all networks
 * @param state           resumption-token state holder, mutated by this call
 * @param includeMetadata whether to attach the published XML to each record (ListRecords vs ListIdentifiers)
 * @return the records of the current page
 * @throws NoRecordsMatchException if the set does not exist or the current page is empty
 */
public List<Record> listRecords(String set, StateHolder state, boolean includeMetadata)
        throws CannotDisseminateFormatException, NoRecordsMatchException {

    if (oaiRecordRepository == null)
        throw new IllegalStateException("listRecords() expects a non-null oairecord repository");

    final List<Record> records = new ArrayList<Record>();

    // First call of a resumption sequence: build the page plan.
    if (state.isFirstCall()) {
        List<Long> snapshotIdList = new ArrayList<Long>();
        List<Integer> totalPageList = new ArrayList<Integer>();
        // Case: an explicit set (country ISO) was requested.
        if (set != null && !set.toUpperCase().equals(DRIVER_SET_NAME)) {
            NationalNetwork network = nationalNetworkRepository.findByCountryISO(set);
            if (network == null) {
                throw new NoRecordsMatchException("Set dont exist");
            } else {
                NetworkSnapshot snapshot = networkSnapshotRepository
                        .findLastGoodKnowByNetworkID(network.getId());
                if (snapshot != null) {
                    // Fetch the first page only to learn the total page count.
                    Page<OAIRecord> page = oaiRecordRepository.findBySnapshotAndStatus(snapshot,
                            RecordStatus.VALID, new PageRequest(0, PAGE_SIZE));
                    // Register this snapshot and its page count in the plan.
                    snapshotIdList.add(snapshot.getId());
                    totalPageList.add(page.getTotalPages());
                }
            }
        } else { // Case: null set (or the driver set) — walk every published network.
            for (NationalNetwork network : nationalNetworkRepository.findByPublishedOrderByNameAsc(true)) {
                NetworkSnapshot snapshot = networkSnapshotRepository
                        .findLastGoodKnowByNetworkID(network.getId());
                if (snapshot != null) {
                    // Fetch the first page only to learn the total page count.
                    Page<OAIRecord> page = oaiRecordRepository.findBySnapshotAndStatus(snapshot,
                            RecordStatus.VALID, new PageRequest(0, PAGE_SIZE));
                    // Register this snapshot and its page count in the plan.
                    snapshotIdList.add(snapshot.getId());
                    totalPageList.add(page.getTotalPages());
                }
            }
        }
        // Seed the resumption state with the plan.
        state.initialize(snapshotIdList, totalPageList);
    }

    // Serve the page the state currently points at.
    NetworkSnapshot snapshot = networkSnapshotRepository.findOne(state.obtainActualSnapshotID());
    Page<OAIRecord> page = oaiRecordRepository.findBySnapshotAndStatus(snapshot, RecordStatus.VALID,
            new PageRequest(state.obtainActualPage(), PAGE_SIZE));

    // Advance the state to the next page / snapshot.
    state.update();

    // NOTE(review): from/until date-range filtering is not implemented here; the
    // original carried a commented-out Solr-query sketch for it (lastModifiedDateFacet
    // range clauses built with a yyyyMMddHHmmssSSS SimpleDateFormat).

    try {
        if (page.getContent().size() == 0)
            throw new NoRecordsMatchException();

        for (OAIRecord oairecord : page.getContent()) {
            final Record record = new Record();
            // Identifier, datestamp and set (country ISO) for the OAI header.
            record.setIdentifier(buildIdentifier(oairecord));
            record.setDate(dateFormat.format(oairecord.getDatestamp()));
            record.setDeleted(false);
            record.addSet(oairecord.getSnapshot().getNetwork().getCountryISO());
            if (includeMetadata)
                record.setMetadata(oairecord.getPublishedXML());
            records.add(record);
        }
    } finally {
        //session.setMemento(memento);
    }

    return records;
}