Usage examples for org.apache.commons.io.FileUtils byteCountToDisplaySize
public static String byteCountToDisplaySize(long size)
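Before the project examples, a minimal standalone sketch of the call itself (the class name and the input values are illustrative, not taken from any of the projects below). byteCountToDisplaySize formats a byte count using binary (1024-based) units and truncates the result to a whole number, so it is suited to display, not to exact arithmetic:

import org.apache.commons.io.FileUtils;

public class ByteCountDemo {
    public static void main(String[] args) {
        // Units are binary (1 KB = 1024 bytes) and the value is truncated, not rounded.
        System.out.println(FileUtils.byteCountToDisplaySize(1023));                  // 1023 bytes
        System.out.println(FileUtils.byteCountToDisplaySize(1024));                  // 1 KB
        System.out.println(FileUtils.byteCountToDisplaySize(1024 * 1024 - 1));       // 1023 KB
        System.out.println(FileUtils.byteCountToDisplaySize(3 * FileUtils.ONE_GB));  // 3 GB
    }
}

Because the result loses precision, the examples below use it only for log messages and UI labels while keeping the raw long byte counts for comparisons.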
From source file:net.sf.zekr.engine.audio.AudioCacheManager.java
/**
 * Removes files from the user's audio cache so that the cache size limit
 * ({@link #getCapacitySize()} megabytes) is met. Does nothing if the cache
 * size does not exceed {@link #getCapacitySize()}.
 */
@SuppressWarnings("unchecked")
public void flushCache() {
    logger.info("Flush audio cache.");
    long cacheSize = FileUtils.sizeOfDirectory(userPath);
    if (cacheSize > FileUtils.ONE_MB * capacitySize) {
        logger.info("Capacity size is " + capacitySize + " MB, of which "
                + FileUtils.byteCountToDisplaySize(cacheSize) + " is used. Flush size is " + flushSize + " MB.");
        Collection<File> audioDirectoryList = FileUtils.listFiles(userPath, new AbstractFileFilter() {
            public boolean accept(File file) {
                return file.isDirectory();
            }
        }, null);
        List<File> fileList = new ArrayList<File>();
        for (File dir : audioDirectoryList) {
            fileList.addAll(Arrays.asList(dir.listFiles()));
        }
        // delete older files first
        Collections.sort(fileList, LastModifiedFileComparator.LASTMODIFIED_COMPARATOR);
        long deleteSize = 0;
        for (int i = 0; i < fileList.size(); i++) {
            // flushSize is in megabytes, so convert before comparing against a byte count
            if (deleteSize > flushSize * FileUtils.ONE_MB) {
                logger.info("Cache flushing suffices. " + FileUtils.byteCountToDisplaySize(deleteSize)
                        + " was deleted.");
                break;
            }
            File file = fileList.get(i);
            deleteSize += file.length();
            logger.debug("Delete: " + file);
            file.delete();
        }
    } else {
        logger.info("No flush is required.");
    }
}
From source file:com.dotosoft.dotoquiz.tools.thirdparty.PicasawebClient.java
public boolean downloadPhoto(File saveLocation, PhotoEntry photo) throws IOException, ParseException {
    boolean downloadSuccess = false;
    final int BUFFER_SIZE = 8096;
    final int TIMEOUT_MS = 10 * 1000;

    File saveFolder = saveLocation.getParentFile();
    boolean createdFolder = false;
    if (!saveFolder.exists()) {
        log.info("Creating local folder: " + saveFolder.getName());
        if (!saveFolder.mkdirs())
            throw new IOException("Unable to create folder " + saveFolder.getName());
        createdFolder = true;
    }

    log.debug("Beginning download for " + saveLocation + "...");

    File tempFile = new File(saveLocation + ".tmp");
    tempFile.deleteOnExit();
    FileOutputStream fos = new FileOutputStream(tempFile);

    List<MediaContent> media = photo.getMediaContents();
    URL fileUrl = new URL(media.get(0).getUrl());
    if (media.size() > 1) {
        if (media.size() > 2) {
            log.debug("Extracting h264 video stream...");
            fileUrl = new URL(media.get(2).getUrl());
        } else {
            log.debug("Extracting low-res video stream...");
            fileUrl = new URL(media.get(1).getUrl());
        }
    }

    try {
        URLConnection conn = fileUrl.openConnection();
        conn.setConnectTimeout(TIMEOUT_MS);
        conn.setReadTimeout(TIMEOUT_MS);

        InputStream dis = conn.getInputStream();
        long totalRead = 0;
        int readCount;
        byte[] b = new byte[BUFFER_SIZE];

        // read() returns -1 at end of stream
        while ((readCount = dis.read(b)) != -1) {
            totalRead += readCount;
            fos.write(b, 0, readCount);
        }
        dis.close();
        fos.close();

        if (!tempFile.renameTo(saveLocation))
            throw new IOException("Unable to rename temp file to " + saveLocation);

        // Fix up the timestamps from the photo metadata
        updateTimeFromTags(saveLocation, photo, createdFolder);

        log.info("Written " + FileUtils.byteCountToDisplaySize(totalRead) + " to " + saveLocation
                + " successfully.");
        downloadSuccess = true;
    } catch (ConnectException ex) {
        log.warn("Network connection failed downloading " + fileUrl, ex);
        saveLocation = null;
    } catch (Exception ex) {
        log.error("Unexpected exception downloading " + fileUrl, ex);
        saveLocation = null;
    }

    return downloadSuccess;
}
From source file:com.linkedin.drelephant.mapreduce.heuristics.GenericMemoryHeuristic.java
@Override
public HeuristicResult apply(MapReduceApplicationData data) {
    if (!data.getSucceeded()) {
        return null;
    }

    String containerSizeStr = data.getConf().getProperty(_containerMemConf);
    if (containerSizeStr == null) {
        return null;
    }

    long containerMem;
    try {
        containerMem = Long.parseLong(containerSizeStr);
    } catch (NumberFormatException e) {
        // Some jobs have a string variable like "${VAR}" for this config.
        if (containerSizeStr.startsWith("$")) {
            String realContainerConf = containerSizeStr.substring(containerSizeStr.indexOf("{") + 1,
                    containerSizeStr.indexOf("}"));
            containerMem = Long.parseLong(data.getConf().getProperty(realContainerConf));
        } else {
            throw e;
        }
    }
    containerMem *= FileUtils.ONE_MB;

    MapReduceTaskData[] tasks = getTasks(data);
    List<Long> taskPMems = new ArrayList<Long>();
    List<Long> taskVMems = new ArrayList<Long>();
    List<Long> runtimesMs = new ArrayList<Long>();
    long taskPMin = Long.MAX_VALUE;
    long taskPMax = 0;
    for (MapReduceTaskData task : tasks) {
        if (task.isSampled()) {
            runtimesMs.add(task.getTotalRunTimeMs());
            long taskPMem = task.getCounters().get(MapReduceCounterData.CounterName.PHYSICAL_MEMORY_BYTES);
            long taskVMem = task.getCounters().get(MapReduceCounterData.CounterName.VIRTUAL_MEMORY_BYTES);
            taskPMems.add(taskPMem);
            taskPMin = Math.min(taskPMin, taskPMem);
            taskPMax = Math.max(taskPMax, taskPMem);
            taskVMems.add(taskVMem);
        }
    }

    if (taskPMin == Long.MAX_VALUE) {
        taskPMin = 0;
    }

    long taskPMemAvg = Statistics.average(taskPMems);
    long taskVMemAvg = Statistics.average(taskVMems);
    long averageTimeMs = Statistics.average(runtimesMs);

    Severity severity;
    if (tasks.length == 0) {
        severity = Severity.NONE;
    } else {
        severity = getTaskMemoryUtilSeverity(taskPMemAvg, containerMem);
    }

    HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(),
            _heuristicConfData.getHeuristicName(), severity, Utils.getHeuristicScore(severity, tasks.length));

    result.addResultDetail("Number of tasks", Integer.toString(tasks.length));
    result.addResultDetail("Avg task runtime", Statistics.readableTimespan(averageTimeMs));
    result.addResultDetail("Avg Physical Memory (MB)", Long.toString(taskPMemAvg / FileUtils.ONE_MB));
    result.addResultDetail("Max Physical Memory (MB)", Long.toString(taskPMax / FileUtils.ONE_MB));
    result.addResultDetail("Min Physical Memory (MB)", Long.toString(taskPMin / FileUtils.ONE_MB));
    result.addResultDetail("Avg Virtual Memory (MB)", Long.toString(taskVMemAvg / FileUtils.ONE_MB));
    result.addResultDetail("Requested Container Memory", FileUtils.byteCountToDisplaySize(containerMem));

    return result;
}
From source file:net.sf.logsaw.index.internal.LuceneIndexServiceImpl.java
@Override
public String size(ILogResource log) {
    Assert.isNotNull(log, "log"); //$NON-NLS-1$
    long size = FileUtils.sizeOfDirectory(IndexPlugin.getDefault().getIndexFile(log));
    return FileUtils.byteCountToDisplaySize(size);
}
From source file:com.gitblit.wicket.pages.FilestorePage.java
public FilestorePage(PageParameters params) {
    super(params);
    setupPage("", "");

    int itemsPerPage = app().settings().getInteger(Keys.web.itemsPerPage, 20);
    if (itemsPerPage <= 1) {
        itemsPerPage = 20;
    }

    final int pageNumber = WicketUtils.getPage(params);
    final String filter = WicketUtils.getSearchString(params);

    int prevPage = Math.max(0, pageNumber - 1);
    int nextPage = pageNumber + 1;
    boolean hasMore = false;

    final UserModel user = (GitBlitWebSession.get().getUser() == null) ? UserModel.ANONYMOUS
            : GitBlitWebSession.get().getUser();
    final long nBytesUsed = app().filestore().getFilestoreUsedByteCount();
    final long nBytesAvailable = app().filestore().getFilestoreAvailableByteCount();

    List<FilestoreModel> files = app().filestore().getAllObjects(user);
    if (files == null) {
        files = new ArrayList<FilestoreModel>();
    }

    long nOk = 0;
    long nPending = 0;
    long nInprogress = 0;
    long nError = 0;
    long nDeleted = 0;

    for (FilestoreModel file : files) {
        switch (file.getStatus()) {
        case Available:
            nOk++;
            break;
        case Upload_Pending:
            nPending++;
            break;
        case Upload_In_Progress:
            nInprogress++;
            break;
        case Deleted:
            nDeleted++;
            break;
        default:
            nError++;
            break;
        }
    }

    BookmarkablePageLink<Void> itemOk = new BookmarkablePageLink<Void>("filterByOk", FilestorePage.class,
            WicketUtils.newFilestorePageParameter(prevPage, SortBy.ok.name()));
    BookmarkablePageLink<Void> itemPending = new BookmarkablePageLink<Void>("filterByPending",
            FilestorePage.class, WicketUtils.newFilestorePageParameter(prevPage, SortBy.pending.name()));
    BookmarkablePageLink<Void> itemInprogress = new BookmarkablePageLink<Void>("filterByInprogress",
            FilestorePage.class, WicketUtils.newFilestorePageParameter(prevPage, SortBy.inprogress.name()));
    BookmarkablePageLink<Void> itemError = new BookmarkablePageLink<Void>("filterByError", FilestorePage.class,
            WicketUtils.newFilestorePageParameter(prevPage, SortBy.error.name()));
    BookmarkablePageLink<Void> itemDeleted = new BookmarkablePageLink<Void>("filterByDeleted",
            FilestorePage.class, WicketUtils.newFilestorePageParameter(prevPage, SortBy.deleted.name()));

    List<FilestoreModel> filteredResults = new ArrayList<FilestoreModel>(files.size());

    if (filter == null) {
        filteredResults = files;
    } else if (filter.equals(SortBy.ok.name())) {
        WicketUtils.setCssClass(itemOk, "filter-on");
        for (FilestoreModel item : files) {
            if (item.getStatus() == Status.Available) {
                filteredResults.add(item);
            }
        }
    } else if (filter.equals(SortBy.pending.name())) {
        WicketUtils.setCssClass(itemPending, "filter-on");
        for (FilestoreModel item : files) {
            if (item.getStatus() == Status.Upload_Pending) {
                filteredResults.add(item);
            }
        }
    } else if (filter.equals(SortBy.inprogress.name())) {
        WicketUtils.setCssClass(itemInprogress, "filter-on");
        for (FilestoreModel item : files) {
            if (item.getStatus() == Status.Upload_In_Progress) {
                filteredResults.add(item);
            }
        }
    } else if (filter.equals(SortBy.error.name())) {
        WicketUtils.setCssClass(itemError, "filter-on");
        for (FilestoreModel item : files) {
            if (item.isInErrorState()) {
                filteredResults.add(item);
            }
        }
    } else if (filter.equals(SortBy.deleted.name())) {
        WicketUtils.setCssClass(itemDeleted, "filter-on");
        for (FilestoreModel item : files) {
            if (item.getStatus() == Status.Deleted) {
                filteredResults.add(item);
            }
        }
    }

    DataView<FilestoreModel> filesView = new DataView<FilestoreModel>("fileRow",
            new ListDataProvider<FilestoreModel>(filteredResults), itemsPerPage) {
        private static final long serialVersionUID = 1L;
        private int counter;

        @Override
        protected void onBeforeRender() {
            super.onBeforeRender();
            counter = 0;
        }

        @Override
        public void populateItem(final Item<FilestoreModel> item) {
            final FilestoreModel entry = item.getModelObject();
            DateFormat dateFormatter = new SimpleDateFormat(Constants.ISO8601);

            UserModel user = app().users().getUserModel(entry.getChangedBy());
            user = user == null ? UserModel.ANONYMOUS : user;

            Label icon = FilestoreUI.getStatusIcon("status", entry);
            item.add(icon);
            item.add(new Label("on", dateFormatter.format(entry.getChangedOn())));
            item.add(new Label("by", user.getDisplayName()));
            item.add(new Label("oid", entry.oid));
            item.add(new Label("size", FileUtils.byteCountToDisplaySize(entry.getSize())));

            WicketUtils.setAlternatingBackground(item, counter);
            counter++;
        }
    };

    if (filteredResults.size() < itemsPerPage) {
        filesView.setCurrentPage(0);
        hasMore = false;
    } else {
        filesView.setCurrentPage(pageNumber - 1);
        hasMore = true;
    }
    add(filesView);

    add(new BookmarkablePageLink<Void>("firstPageBottom", FilestorePage.class).setEnabled(pageNumber > 1));
    add(new BookmarkablePageLink<Void>("prevPageBottom", FilestorePage.class,
            WicketUtils.newFilestorePageParameter(prevPage, filter)).setEnabled(pageNumber > 1));
    add(new BookmarkablePageLink<Void>("nextPageBottom", FilestorePage.class,
            WicketUtils.newFilestorePageParameter(nextPage, filter)).setEnabled(hasMore));

    itemOk.add(FilestoreUI.getStatusIcon("statusOkIcon", FilestoreModel.Status.Available));
    itemPending.add(FilestoreUI.getStatusIcon("statusPendingIcon", FilestoreModel.Status.Upload_Pending));
    itemInprogress
            .add(FilestoreUI.getStatusIcon("statusInprogressIcon", FilestoreModel.Status.Upload_In_Progress));
    itemError.add(FilestoreUI.getStatusIcon("statusErrorIcon", FilestoreModel.Status.Error_Unknown));
    itemDeleted.add(FilestoreUI.getStatusIcon("statusDeletedIcon", FilestoreModel.Status.Deleted));

    itemOk.add(new Label("statusOkCount", String.valueOf(nOk)));
    itemPending.add(new Label("statusPendingCount", String.valueOf(nPending)));
    itemInprogress.add(new Label("statusInprogressCount", String.valueOf(nInprogress)));
    itemError.add(new Label("statusErrorCount", String.valueOf(nError)));
    itemDeleted.add(new Label("statusDeletedCount", String.valueOf(nDeleted)));

    add(itemOk);
    add(itemPending);
    add(itemInprogress);
    add(itemError);
    add(itemDeleted);

    add(new Label("spaceAvailable", String.format("%s / %s", FileUtils.byteCountToDisplaySize(nBytesUsed),
            FileUtils.byteCountToDisplaySize(nBytesAvailable))));

    BookmarkablePageLink<Void> helpLink = new BookmarkablePageLink<Void>("filestoreHelp", FilestoreUsage.class);
    helpLink.add(new Label("helpMessage", getString("gb.filestoreHelp")));
    add(helpLink);
}
From source file:de.hybris.platform.customerticketingaddon.controllers.pages.AccountSupportTicketsPageController.java
/**
 * Used for retrieving the page to create a customer support ticket.
 *
 * @param model
 * @return view name
 * @throws CMSItemNotFoundException
 */
@RequestMapping(value = "/add-support-ticket", method = RequestMethod.GET)
@RequireHardLogIn
public String addSupportTicket(final Model model) throws CMSItemNotFoundException {
    storeCmsPageInModel(model,
            getContentPageForLabelOrId(CustomerticketingaddonConstants.ADD_SUPPORT_TICKET_PAGE));
    setUpMetaDataForContentPage(model,
            getContentPageForLabelOrId(CustomerticketingaddonConstants.ADD_SUPPORT_TICKET_PAGE));
    model.addAttribute(WebConstants.BREADCRUMBS_KEY,
            getBreadcrumbs(CustomerticketingaddonConstants.TEXT_SUPPORT_TICKETING_ADD));
    model.addAttribute(ThirdPartyConstants.SeoRobots.META_ROBOTS,
            ThirdPartyConstants.SeoRobots.NOINDEX_NOFOLLOW);
    model.addAttribute(CustomerticketingaddonConstants.SUPPORT_TICKET_FORM, new SupportTicketForm());
    model.addAttribute(CustomerticketingaddonConstants.MAX_UPLOAD_SIZE, Long.valueOf(maxUploadSizeValue));
    model.addAttribute(CustomerticketingaddonConstants.MAX_UPLOAD_SIZE_MB,
            FileUtils.byteCountToDisplaySize(maxUploadSizeValue));
    try {
        model.addAttribute(CustomerticketingaddonConstants.SUPPORT_TICKET_ASSOCIATED_OBJECTS,
                ticketFacade.getAssociatedToObjects());
        model.addAttribute(CustomerticketingaddonConstants.SUPPORT_TICKET_CATEGORIES,
                ticketFacade.getTicketCategories());
    } catch (final UnsupportedOperationException ex) {
        LOG.error(ex.getMessage(), ex);
    }
    return getViewForPage(model);
}
From source file:com.linkedin.drelephant.tez.heuristics.GenericDataSkewHeuristic.java
public HeuristicResult apply(TezApplicationData data) {
    if (!data.getSucceeded()) {
        return null;
    }

    TezTaskData[] tasks = getTasks(data);

    // Gathering data for checking time skew
    List<Long> timeTaken = new ArrayList<Long>();
    for (int i = 0; i < tasks.length; i++) {
        if (tasks[i].isSampled()) {
            timeTaken.add(tasks[i].getTotalRunTimeMs());
        }
    }

    long[][] groupsTime = Statistics.findTwoGroups(Longs.toArray(timeTaken));

    long timeAvg1 = Statistics.average(groupsTime[0]);
    long timeAvg2 = Statistics.average(groupsTime[1]);

    // Seconds are used for calculating deviation as they provide a better idea than milliseconds.
    long timeAvgSec1 = TimeUnit.MILLISECONDS.toSeconds(timeAvg1);
    long timeAvgSec2 = TimeUnit.MILLISECONDS.toSeconds(timeAvg2);

    long minTime = Math.min(timeAvgSec1, timeAvgSec2);
    long diffTime = Math.abs(timeAvgSec1 - timeAvgSec2);

    // Using the same deviation limits for time skew as for data skew. It can be changed in the future.
    Severity severityTime = getDeviationSeverity(minTime, diffTime);

    // This reduces severity if the number of tasks is insignificant
    severityTime = Severity.min(severityTime, Severity.getSeverityAscending(groupsTime[0].length,
            numTasksLimits[0], numTasksLimits[1], numTasksLimits[2], numTasksLimits[3]));

    // Gather data
    List<Long> inputSizes = new ArrayList<Long>();
    for (int i = 0; i < tasks.length; i++) {
        if (tasks[i].isSampled()) {
            long inputByte = 0;
            for (TezCounterData.CounterName counterName : _counterNames) {
                inputByte += tasks[i].getCounters().get(counterName);
            }
            inputSizes.add(inputByte);
        }
    }

    long[][] groups = Statistics.findTwoGroups(Longs.toArray(inputSizes));

    long avg1 = Statistics.average(groups[0]);
    long avg2 = Statistics.average(groups[1]);

    long min = Math.min(avg1, avg2);
    long diff = Math.abs(avg2 - avg1);

    Severity severityData = getDeviationSeverity(min, diff);

    // This reduces severity if the largest file sizes are insignificant
    severityData = Severity.min(severityData, getFilesSeverity(avg2));

    // This reduces severity if the number of tasks is insignificant
    severityData = Severity.min(severityData, Severity.getSeverityAscending(groups[0].length,
            numTasksLimits[0], numTasksLimits[1], numTasksLimits[2], numTasksLimits[3]));

    Severity severity = Severity.max(severityData, severityTime);

    HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(),
            _heuristicConfData.getHeuristicName(), severity,
            Utils.getHeuristicScore(severityData, tasks.length));

    result.addResultDetail("Data skew (Number of tasks)", Integer.toString(tasks.length));
    result.addResultDetail("Data skew (Group A)",
            groups[0].length + " tasks @ " + FileUtils.byteCountToDisplaySize(avg1) + " avg");
    result.addResultDetail("Data skew (Group B)",
            groups[1].length + " tasks @ " + FileUtils.byteCountToDisplaySize(avg2) + " avg");
    result.addResultDetail("Time skew (Number of tasks)", Integer.toString(tasks.length));
    result.addResultDetail("Time skew (Group A)",
            groupsTime[0].length + " tasks @ " + convertTimeMs(timeAvg1) + " avg");
    result.addResultDetail("Time skew (Group B)",
            groupsTime[1].length + " tasks @ " + convertTimeMs(timeAvg2) + " avg");

    return result;
}
From source file:com.otway.picasasync.syncutil.ImageSync.java
private boolean localCopyNeedsUpdating(PhotoEntry photo, File localPath)
        throws ImageReadException, IOException, ServiceException {
    boolean updateLocal = false;

    if (localPath.exists()) {
        LocalDateTime localMod = getTimeFromMS(localPath.lastModified());
        LocalDateTime remoteMod = getTimeFromMS(photo.getUpdated().getValue());

        long seconds = ChronoUnit.SECONDS.between(localMod, remoteMod);
        if (Math.abs(seconds) > 1)
            return true;

        long localFileSize = localPath.length();
        long remoteFileSize = photo.getSize();

        if (localFileSize != remoteFileSize) {
            log.info(String.format(
                    "File sizes are different: (local %s vs remote %s). Local file will be updated.",
                    FileUtils.byteCountToDisplaySize(localFileSize),
                    FileUtils.byteCountToDisplaySize(remoteFileSize)));
            return true;
        }

        ImageInformation localInfo = ImageInformation.safeReadImageInformation(localPath);

        if (localInfo != null) {
            Integer rotation = photo.getRotation();
            if (rotation != null)
                log.info("PhotoEntry rotation was set!");

            // Make sure we take into account the rotation of the image when comparing width/height
            long localWidth = localInfo.getWidthHeightTransposed() ? localInfo.getHeight()
                    : localInfo.getWidth();
            long localHeight = localInfo.getWidthHeightTransposed() ? localInfo.getWidth()
                    : localInfo.getHeight();

            if (localWidth != photo.getWidth() || localHeight != photo.getHeight()) {
                log.info(String.format(
                        "Image dimensions are different: (local %dx%d vs remote %dx%d). Local file will be updated.",
                        localInfo.getWidth(), localInfo.getHeight(), photo.getWidth(), photo.getHeight()));
                return true;
            }
        } else {
            log.warn("Local file was not an image! Renaming before overwrite. (" + localPath.getName() + ")");
            File renamed = new File(localPath + ".old");
            if (!localPath.renameTo(renamed))
                log.warn("Unable to rename file");
            updateLocal = true;
        }
    } else {
        log.debug("No local file existed: " + localPath);
        // Nothing here, so always write
        updateLocal = true;
    }

    return updateLocal;
}
From source file:com.linkedin.drelephant.mapreduce.heuristics.GenericSkewHeuristic.java
@Override
public HeuristicResult apply(MapReduceApplicationData data) {
    if (!data.getSucceeded()) {
        return null;
    }

    MapReduceTaskData[] tasks = getTasks(data);

    // Gathering data for checking time skew
    List<Long> timeTaken = new ArrayList<Long>();
    for (int i = 0; i < tasks.length; i++) {
        if (tasks[i].isTimeDataPresent()) {
            timeTaken.add(tasks[i].getTotalRunTimeMs());
        }
    }

    long[][] groupsTime = Statistics.findTwoGroups(Longs.toArray(timeTaken));

    long timeAvg1 = Statistics.average(groupsTime[0]);
    long timeAvg2 = Statistics.average(groupsTime[1]);

    // Seconds are used for calculating deviation as they provide a better idea than milliseconds.
    long timeAvgSec1 = TimeUnit.MILLISECONDS.toSeconds(timeAvg1);
    long timeAvgSec2 = TimeUnit.MILLISECONDS.toSeconds(timeAvg2);

    long minTime = Math.min(timeAvgSec1, timeAvgSec2);
    long diffTime = Math.abs(timeAvgSec1 - timeAvgSec2);

    // Using the same deviation limits for time skew as for data skew. It can be changed in the future.
    Severity severityTime = getDeviationSeverity(minTime, diffTime);

    // This reduces severity if the number of tasks is insignificant
    severityTime = Severity.min(severityTime, Severity.getSeverityAscending(groupsTime[0].length,
            numTasksLimits[0], numTasksLimits[1], numTasksLimits[2], numTasksLimits[3]));

    // Gather data
    List<Long> inputBytes = new ArrayList<Long>();
    for (int i = 0; i < tasks.length; i++) {
        if (tasks[i].isCounterDataPresent()) {
            long inputByte = 0;
            for (MapReduceCounterData.CounterName counterName : _counterNames) {
                inputByte += tasks[i].getCounters().get(counterName);
            }
            inputBytes.add(inputByte);
        }
    }

    // Ratio of total tasks / sampled tasks
    double scale = ((double) tasks.length) / inputBytes.size();

    // Analyze data. TODO: This is a temp fix. findTwoGroups should support a list as input
    long[][] groups = Statistics.findTwoGroups(Longs.toArray(inputBytes));

    long avg1 = Statistics.average(groups[0]);
    long avg2 = Statistics.average(groups[1]);

    long min = Math.min(avg1, avg2);
    long diff = Math.abs(avg2 - avg1);

    Severity severityData = getDeviationSeverity(min, diff);

    // This reduces severity if the largest file sizes are insignificant
    severityData = Severity.min(severityData, getFilesSeverity(avg2));

    // This reduces severity if the number of tasks is insignificant
    severityData = Severity.min(severityData, Severity.getSeverityAscending(groups[0].length,
            numTasksLimits[0], numTasksLimits[1], numTasksLimits[2], numTasksLimits[3]));

    Severity severity = Severity.max(severityData, severityTime);

    HeuristicResult result = new HeuristicResult(_heuristicConfData.getClassName(),
            _heuristicConfData.getHeuristicName(), severity,
            Utils.getHeuristicScore(severityData, tasks.length));

    result.addResultDetail("Data skew (Number of tasks)", Integer.toString(tasks.length));
    result.addResultDetail("Data skew (Group A)",
            groups[0].length + " tasks @ " + FileUtils.byteCountToDisplaySize(avg1) + " avg");
    result.addResultDetail("Data skew (Group B)",
            groups[1].length + " tasks @ " + FileUtils.byteCountToDisplaySize(avg2) + " avg");
    result.addResultDetail("Time skew (Number of tasks)", Integer.toString(tasks.length));
    result.addResultDetail("Time skew (Group A)",
            groupsTime[0].length + " tasks @ " + convertTimeMs(timeAvg1) + " avg");
    result.addResultDetail("Time skew (Group B)",
            groupsTime[1].length + " tasks @ " + convertTimeMs(timeAvg2) + " avg");

    return result;
}
From source file:com.jaeksoft.searchlib.scheduler.task.TaskQueryXsltPost.java
@Override
public void execute(Client client, TaskProperties properties, Variables variables, TaskLog taskLog)
        throws SearchLibException {
    taskLog.setInfo("Query check");
    String searchTemplate = properties.getValue(propSearchTemplate);
    String queryString = properties.getValue(propQueryString);
    String xsl = properties.getValue(propXsl);
    String url = properties.getValue(propUrl);
    String httpLogin = properties.getValue(propHttpLogin);
    String httpPassword = properties.getValue(propHttpPassword);
    String contentType = properties.getValue(propHttpContentType);
    boolean useProxy = Boolean.TRUE.toString().equals(properties.getValue(propUseProxy));
    URI uri;
    try {
        uri = new URI(url);
    } catch (URISyntaxException e) {
        throw new SearchLibException(e);
    }
    AbstractSearchRequest searchRequest = (AbstractSearchRequest) client.getNewRequest(searchTemplate);
    if (searchRequest == null)
        throw new SearchLibException("Request template " + searchTemplate + " not found");
    searchRequest.setQueryString(queryString);
    taskLog.setInfo("Execute request " + searchTemplate);
    AbstractResultSearch resultSearch = (AbstractResultSearch) client.request(searchRequest);
    StringWriter sw = null;
    PrintWriter pw = null;
    StringReader sr = null;
    try {
        sw = new StringWriter();
        pw = new PrintWriter(sw);
        taskLog.setInfo("Render XML");
        new RenderSearchXml(resultSearch).render(pw);
        pw.close();
        pw = null;
        sw.close();
        String content = sw.toString();
        sw = null;
        sr = new StringReader(content);
        if (!StringUtils.isEmpty(xsl)) {
            taskLog.setInfo("XSL transformation");
            content = DomUtils.xslt(new StreamSource(sr), xsl);
            if (content == null)
                throw new SearchLibException("XSL transformation failed");
        }
        CredentialItem credentialItem = null;
        if (!StringUtils.isEmpty(httpLogin) && !StringUtils.isEmpty(httpPassword))
            credentialItem = new CredentialItem(CredentialType.BASIC_DIGEST, null, httpLogin, httpPassword,
                    null, null);
        HttpDownloader downloader = client.getWebCrawlMaster().getNewHttpDownloader(true, null, useProxy);
        List<HeaderItem> headerItems = null;
        if (!StringUtils.isEmpty(contentType)) {
            headerItems = new ArrayList<HeaderItem>(1);
            headerItems.add(new HeaderItem("Content-Type", contentType));
        }
        taskLog.setInfo("Uploading " + FileUtils.byteCountToDisplaySize(content.length()));
        DownloadItem downloadItem = downloader.post(uri, credentialItem, headerItems, null,
                new StringEntity(content));
        downloadItem.checkNoErrorRange(200, 201);
        taskLog.setInfo("Done");
    } catch (Exception e) {
        throw new SearchLibException(e);
    } finally {
        IOUtils.close(pw, sw, sr);
    }
}