List of usage examples for org.apache.commons.io FileUtils.isFileNewer
public static boolean isFileNewer(File file, long timeMillis)
Tests if the specified File is newer than the specified time reference (milliseconds since the epoch).
From source file: com.sforce.cd.apexUnit.report.ApexReportGeneratorTest.java
@Test public void generateHTMLReportTest() { TestStatusPollerAndResultHandler queryPollerAndResultHandler = new TestStatusPollerAndResultHandler(); ApexReportBean[] apexReportBeans = queryPollerAndResultHandler.fetchResultsFromParentJobId(parentJobId, conn);//from ww w . j a v a 2s . c o m CodeCoverageComputer codeCoverageComputer = new CodeCoverageComputer(); Long justBeforeReportGeneration = System.currentTimeMillis(); ApexClassCodeCoverageBean[] apexClassCodeCoverageBeans = codeCoverageComputer .calculateAggregatedCodeCoverageUsingToolingAPI(); if (apexClassCodeCoverageBeans != null) { ApexCodeCoverageReportGenerator.generateHTMLReport(apexClassCodeCoverageBeans); } String reportFilePath = System.getProperty("user.dir") + System.getProperty("file.separator") + "Report" + System.getProperty("file.separator") + "ApexUnitReport.html"; File reportFile = new File(reportFilePath); Assert.assertTrue(FileUtils.isFileNewer(reportFile, justBeforeReportGeneration)); }
From source file:net.sf.zekr.engine.search.lucene.LuceneIndexManager.java
/** * Creates an index in the place user selects. This method first checks if an index already exists for * all-users or not. If not it continues to ask where to create index files.<br> * It uses underlying cache to store {@link ZekrIndexReader}s already read in this session. * //from w ww . j a va 2s . c o m * @param pathArray the first element should be for me-only mode, the second element for all-users. * @param quranText * @param indexId * @param indexPath * @param indexPathKey * @param indexVersionKey * @return cached or newly-created {@link ZekrIndexReader} instance * @throws IndexingException */ @SuppressWarnings("unchecked") private ZekrIndexReader getIndex(String[] pathArray, IQuranText quranText, String indexId, String indexPath, String indexPathKey, String indexVersionKey) throws IndexingException { try { ZekrIndexReader zir = indexReaderMap.get(indexId); if (zir == null) { if (indexPath != null && IndexReader.indexExists(new SimpleFSDirectory(new File(indexPath)))) { return newIndexReader(quranText, indexId, indexPath); } else { // check if index is already created for all-users, and its modify date is newer than zekr build date File indexDir = new File(pathArray[1]); SimpleFSDirectory dir = new SimpleFSDirectory(indexDir); if (IndexReader.indexExists(dir)) { Collection<File> listFiles = FileUtils.listFiles(indexDir, new String[] { "cfs" }, false); if (listFiles.size() > 0) { if (FileUtils.isFileNewer(listFiles.iterator().next(), GlobalConfig.ZEKR_BUILD_DATE)) { ZekrIndexReader res; res = newIndexReader(quranText, indexId, pathArray[1]); props.setProperty(indexPathKey, pathArray[1]); props.setProperty(indexVersionKey, GlobalConfig.ZEKR_BUILD_NUMBER); return res; } } } IndexCreator indexCreator = new IndexCreator(pathArray, quranText, LuceneAnalyzerFactory.getAnalyzer(quranText)); if (indexCreator.indexQuranText()) { props.setProperty(indexPathKey, indexCreator.getIndexDir()); props.setProperty(indexVersionKey, GlobalConfig.ZEKR_BUILD_NUMBER); return 
newIndexReader(quranText, indexId, indexCreator.getIndexDir()); } else { // a non-interruption (bad) exception occurred if (indexCreator.isIndexingErrorOccurred() && indexCreator.getIndexingException() != null) { MessageBoxUtils.showActionFailureError(indexCreator.getIndexingException()); } return null; } } } else { return zir; } } catch (Exception e) { throw new IndexingException(e); } }
From source file:cern.acet.tracing.input.file.tailer.PositionTailer.java
/** * Follows changes in the file, calling the PositionTailerListener's handle method for each new line. *///ww w .jav a 2s .com public void run() { RandomAccessFile reader = null; try { long last = 0; // The last time the file was checked for changes long position = 0; // position within the file // Open the file while (run && reader == null) { try { reader = new RandomAccessFile(file, RAF_MODE); } catch (FileNotFoundException e) { listener.fileNotFound(); } if (reader == null) { try { Thread.sleep(delayMillis); } catch (InterruptedException e) { } } else { // The current position in the file position = startingPosition == null ? file.length() : startingPosition; last = System.currentTimeMillis(); reader.seek(position); } } while (run) { boolean newer = FileUtils.isFileNewer(file, last); // IO-279, must be done first // Check the file length to see if it was rotated long length = file.length(); if (length < position) { // File was rotated listener.fileRotated(); // Reopen the reader after rotation try { // Ensure that the old file is closed iff we re-open it successfully RandomAccessFile save = reader; reader = new RandomAccessFile(file, RAF_MODE); position = 0; // close old file explicitly rather than relying on GC picking up previous RAF IOUtils.closeQuietly(save); } catch (FileNotFoundException e) { // in this case we continue to use the previous reader and position values listener.fileNotFound(); } continue; } else { // File was not rotated // See if the file needs to be read again if (length > position) { // The file has more content than it did last time position = readLines(reader); last = System.currentTimeMillis(); } else if (newer) { /* * This can happen if the file is truncated or overwritten with the exact same length of * information. 
In cases like this, the file position needs to be reset */ position = 0; reader.seek(position); // cannot be null here // Now we can read new lines position = readLines(reader); last = System.currentTimeMillis(); } } if (reOpen) { IOUtils.closeQuietly(reader); } try { Thread.sleep(delayMillis); } catch (InterruptedException e) { } if (run && reOpen) { reader = new RandomAccessFile(file, RAF_MODE); reader.seek(position); } } } catch (Exception e) { listener.handle(e); } finally { IOUtils.closeQuietly(reader); } }
From source file:com.stevpet.sonar.plugins.dotnet.mscover.sensor.IntegrationTestCoverSensor.java
private boolean transformationNeeded(String xmlPath, String coveragePath) { File xmlFile = new File(xmlPath); File coverageFile = new File(coveragePath); return !xmlFile.exists() || FileUtils.isFileNewer(coverageFile, xmlFile); }
From source file:com.datatorrent.lib.io.fs.TailFsInputOperator.java
private String readLine() throws IOException { StringBuffer sb = new StringBuffer(); char readChar; int ch;/*from www. j a v a 2 s .c om*/ long pos = reader.getFilePointer(); long length = file.length(); if ((length < pos) || (length == pos && FileUtils.isFileNewer(file, accessTime))) { // file got rotated or truncated reader.close(); reader = new RandomAccessFile(file, "r"); position = 0; reader.seek(position); pos = 0; } accessTime = System.currentTimeMillis(); while ((ch = reader.read()) != -1) { readChar = (char) ch; if (readChar != delimiter) { sb.append(readChar); } else { return sb.toString(); } } reader.seek(pos); return null; }
From source file:com.agilejava.docbkx.maven.AbstractTransformerMojo.java
/** * Builds the actual output document./*from w w w .j a v a2 s .c om*/ */ public void execute() throws MojoExecutionException, MojoFailureException { if (isSkip()) { getLog().info("Skipping plugin execution"); return; } // userland (ant tasks) pre process preProcess(); final File targetDirectory = getTargetDirectory(); final File sourceDirectory = getSourceDirectory(); if (!sourceDirectory.exists()) { return; // No sources, so there is nothing to render. } if (!targetDirectory.exists()) { org.codehaus.plexus.util.FileUtils.mkdir(targetDirectory.getAbsolutePath()); } final String[] included = scanIncludedFiles(); // configure a resolver for catalog files final CatalogManager catalogManager = createCatalogManager(); final CatalogResolver catalogResolver = new CatalogResolver(catalogManager); // configure a resolver for urn:dockbx:stylesheet final URIResolver uriResolver = createStyleSheetResolver(catalogResolver); // configure a resolver for xml entities final InjectingEntityResolver injectingResolver = createEntityResolver(catalogResolver); EntityResolver resolver = catalogResolver; if (injectingResolver != null) { resolver = injectingResolver; } // configure the builder for XSL Transforms final TransformerBuilder builder = createTransformerBuilder(uriResolver); // configure the XML parser SAXParserFactory factory = createParserFactory(); // iterate over included source files for (int i = included.length - 1; i >= 0; i--) { try { if (injectingResolver != null) { injectingResolver.forceInjection(); } final String inputFilename = included[i]; // targetFilename is inputFilename - ".xml" + targetFile extension String baseTargetFile = inputFilename.substring(0, inputFilename.length() - 4); final String targetFilename = baseTargetFile + "." 
+ getTargetFileExtension(); final File sourceFile = new File(sourceDirectory, inputFilename); getLog().debug("SourceFile: " + sourceFile.toString()); // creating targetFile File targetFile = null; if (isUseStandardOutput()) { targetFile = new File(targetDirectory, targetFilename); getLog().debug("TargetFile: " + targetFile.toString()); } else { String name = new File(baseTargetFile).getName(); String dir = new File(baseTargetFile).getParent(); if (dir == null) { // file is located on root of targetDirectory targetFile = targetDirectory; } else { // else append the relative directory to targetDirectory targetFile = new File(targetDirectory, dir); } targetFile = new File(targetFile, name + "." + getTargetFileExtension()); getLog().debug("TargetDirectory: " + targetDirectory.getAbsolutePath()); } if (!targetFile.exists() || (targetFile.exists() && FileUtils.isFileNewer(sourceFile, targetFile)) || (targetFile.exists() && getXIncludeSupported())) { getLog().info("Processing input file: " + inputFilename); final XMLReader reader = factory.newSAXParser().getXMLReader(); // configure XML reader reader.setEntityResolver(resolver); // eval PI final PreprocessingFilter filter = createPIHandler(resolver, reader); // configure SAXSource for XInclude final Source xmlSource = createSource(inputFilename, sourceFile, filter); configureXref(targetFile); // XSL Transformation setup final Transformer transformer = builder.build(); adjustTransformer(transformer, sourceFile.getAbsolutePath(), targetFile); // configure the output file Result result = null; if (!shouldProcessResult()) { // if the output is not the main result of the transformation, ie xref database if (getLog().isDebugEnabled()) { result = new StreamResult(System.out); } else { result = new StreamResult(new NullOutputStream()); } } else if (isUseStandardOutput()) { // if the output of the main result is the standard output result = new StreamResult(targetFile.getAbsolutePath()); } else { // if the output of the main 
result is not the standard output if (getLog().isDebugEnabled()) { result = new StreamResult(System.out); } else { result = new StreamResult(new NullOutputStream()); } } transformer.transform(xmlSource, result); if (shouldProcessResult()) { // if the transformation has produce the expected main results, we can continue // the chain of processing in the output mojos which can override postProcessResult postProcessResult(targetFile); if (isUseStandardOutput()) { getLog().info(targetFile + " has been generated."); } else { getLog().info("See " + targetFile.getParentFile().getAbsolutePath() + " for generated file(s)"); } } else { // if the output is not the main result getLog().info("See " + targetFile.getParentFile().getAbsolutePath() + " for generated secondary file(s)"); } } else { getLog().info(targetFile + " is up to date."); } } catch (SAXException saxe) { throw new MojoExecutionException("Failed to parse " + included[i] + ".", saxe); } catch (TransformerException te) { throw new MojoExecutionException("Failed to transform " + included[i] + ".", te); } catch (ParserConfigurationException pce) { throw new MojoExecutionException("Failed to construct parser.", pce); } } // userland (ant tasks) post process postProcess(); }
From source file:de.joinout.criztovyl.tools.directory.DirectoryChanges.java
/** * Locates all changed files, does not include new or deleted files.<br> * As first there is created/received a map with hash-strings as keys and * {@link Path}s as values for the current and previous * {@link FileList} via {@link FileList#getMappedHashedModifications()}. * Then all keys of the previous map are removed from the current map and * the remaining values are returned.<br> * Only files which content changed are included. * // w ww .ja va2 s .c o m * @return a {@link Set} of {@link Path}s * @param forceRecalculate whether there should be a recalculation of the changed files * @see FileList#getMappedHashedModifications() */ public Set<Path> getChangedFiles(boolean forceRecalculate) { if (forceRecalculate || changed == null) { //(Re-)calculate if is wanted or there is no previous calculation // Get all new and deleted files, they are not included in the modified // files, add them to list which files are ignored final Set<Path> ignore = new HashSet<>(); ignore.addAll(getDeletedFiles()); ignore.addAll(getNewFiles()); if (logger.isDebugEnabled()) logger.debug("Files ignored: {}", new TreeSet<>(ignore)); // Create a map for modificated files and put modifications map from current directory final HashMap<String, Path> mod = new HashMap<>(current.getMappedHashedModifications(ignore)); //Receive modifications from previous directory Map<String, Path> mod_p = previous.getMappedHashedModifications(ignore); //Intersect map keys Set<String> intersection = new HashSet<>(mod.keySet()); intersection.retainAll(mod_p.keySet()); if (logger.isDebugEnabled()) { if (!(mod_p.size() > 500)) logger.debug("Modifications map of previous list: {}", new TreeMap<>(mod_p)); else logger.debug("Previous modification map is bigger than 500 elements, will not print out."); if (!(mod_p.size() > 500)) logger.debug("Modifications map of current list: {}", new TreeMap<>(mod)); else logger.debug("Current modification map is bigger than 500 elements, will not print out."); if (!(mod_p.size() 
> 500)) logger.debug("Intersection of above: {}", intersection); else logger.debug("Intersection set is bigger than 500 elements, will not print out."); } //Merge maps mod.putAll(mod_p); // Remove everything which is in both maps mod.keySet().removeAll(new TreeSet<>(intersection)); //Only files which contents changed stay in map //Iterate over keys for (Iterator<String> i = mod.keySet().iterator(); i.hasNext();) { //Get path Path path = mod.get(i.next()); //Check if file has changed (may throw I/O exception) try { if (contentChanged(path)) //Remove if is not newer then complement file if (!FileUtils.isFileNewer(path.getFile(), getComplementPath(path).getFile())) i.remove(); else ; //Has not changed, remove from map else i.remove(); } catch (IOException e) { //Catch IOException, remove from map to avoid further errors i.remove(); if (logger.isWarnEnabled()) logger.warn( "Caught IOException while testing if file is newer: \"{}\". Removing from modifications to prevent further errors.", path); if (logger.isDebugEnabled()) logger.debug(e); } } //Save for reuse changed = new HashSet<>(mod.values()); } //Return changed files return changed; }
From source file:com.dotmarketing.servlets.taillog.Tailer.java
/** * Follows changes in the file, calling the TailerListener's handle method for each new line. *///from w ww . j a v a 2 s . com public void run() { RandomAccessFile reader = null; try { long last = 0; // The last time the file was checked for changes long position = 0; // position within the file // Open the file while (run && reader == null) { try { reader = new RandomAccessFile(file, "r"); } catch (FileNotFoundException e) { listener.fileNotFound(); } if (reader == null) { try { Thread.sleep(delay); } catch (InterruptedException e) { } } else { // The current position in the file position = end ? file.length() : startPosition; last = System.currentTimeMillis(); reader.seek(position); readLine(reader); position = reader.getFilePointer(); } } while (run) { // Check the file length to see if it was rotated long length = file.length(); if (length < position) { // File was rotated listener.fileRotated(); // Reopen the reader after rotation try { // Ensure that the old file is closed iff we re-open it successfully RandomAccessFile save = reader; reader = new RandomAccessFile(file, "r"); position = 0; // close old file explicitly rather than relying on GC picking up previous RAF IOUtils.closeQuietly(save); } catch (FileNotFoundException e) { // in this case we continue to use the previous reader and position values listener.fileNotFound(); } continue; } else { // File was not rotated // See if the file needs to be read again if (length > position) { // The file has more content than it did last time last = System.currentTimeMillis(); position = readLines(reader); } else if (FileUtils.isFileNewer(file, last)) { /* This can happen if the file is truncated or overwritten * with the exact same length of information. 
In cases like * this, the file position needs to be reset */ position = 0; reader.seek(position); // cannot be null here // Now we can read new lines last = System.currentTimeMillis(); position = readLines(reader); } } try { Thread.sleep(delay); } catch (InterruptedException e) { } } } catch (Exception e) { listener.handle(e); } finally { try { reader.close(); } catch (Exception e) { Logger.error(this.getClass(), "Unable to close: " + e.getMessage()); } } }
From source file:com.mediaworx.intellij.opencmsplugin.opencms.OpenCmsModule.java
/** * @return the path of the newest module zip in the target folder, null if no module zip exists */// w ww.j a v a2 s . co m public String findNewestModuleZipPath() { String zipParentPath = getModuleBasePath() + "/" + plugin.getPluginConfiguration().getModuleZipTargetFolderPath(); Collection<File> moduleZips = FileUtils.listFiles(new File(zipParentPath), new String[] { "zip" }, false); File newestModuleZip = null; for (File moduleZip : moduleZips) { if (newestModuleZip == null || FileUtils.isFileNewer(moduleZip, newestModuleZip)) { newestModuleZip = moduleZip; } } if (newestModuleZip != null) { return newestModuleZip.getPath(); } else { return null; } }
From source file:com.excuseme.rocketleaguelivestats.scanner.tailer.Tailer.java
/** * Follows changes in the file, calling the TailerListener's handle method for each new line. *//*from www . j ava 2s . co m*/ public void run() { RandomAccessFile reader = null; try { long last = 0; // The last time the file was checked for changes long position = 0; // position within the file // Open the file while (run && reader == null) { try { reader = new RandomAccessFile(file, RAF_MODE); } catch (FileNotFoundException e) { listener.fileNotFound(); } if (reader == null) { try { Thread.sleep(delayMillis); } catch (InterruptedException e) { } } else { // The current position in the file position = end ? file.length() : 0; last = System.currentTimeMillis(); reader.seek(position); } } while (run) { boolean newer = FileUtils.isFileNewer(file, last); // IO-279, must be done first // Check the file length to see if it was rotated long length = file.length(); if (length < position) { // File was rotated listener.fileRotated(); // Reopen the reader after rotation try { // Ensure that the old file is closed iff we re-open it successfully RandomAccessFile save = reader; reader = new RandomAccessFile(file, RAF_MODE); position = 0; // close old file explicitly rather than relying on GC picking up previous RAF IOUtils.closeQuietly(save); } catch (FileNotFoundException e) { // in this case we continue to use the previous reader and position values listener.fileNotFound(); } continue; } else { // File was not rotated // See if the file needs to be read again if (length > position) { // The file has more content than it did last time position = readLines(reader); last = System.currentTimeMillis(); } else if (newer) { /* * This can happen if the file is truncated or overwritten with the exact same length of * information. 
In cases like this, the file position needs to be reset */ position = 0; reader.seek(position); // cannot be null here // Now we can read new lines position = readLines(reader); last = System.currentTimeMillis(); } } if (reOpen) { IOUtils.closeQuietly(reader); } try { Thread.sleep(delayMillis); } catch (InterruptedException e) { } if (run && reOpen) { reader = new RandomAccessFile(file, RAF_MODE); reader.seek(position); } } } catch (Exception e) { listener.handle(e); } finally { IOUtils.closeQuietly(reader); } }