List of usage examples for java.nio.file.attribute.BasicFileAttributes.lastAccessTime()
FileTime lastAccessTime();
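A minimal sketch of the call in isolation (the file name is only an illustration): read the basic attribute view once, then ask it for the last access time.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileTime;

public class ReadLastAccessTime {
    public static void main(String[] args) throws IOException {
        Path path = Paths.get("example.txt"); // hypothetical file
        // Read the basic attribute view in one bulk operation.
        BasicFileAttributes attrs = Files.readAttributes(path, BasicFileAttributes.class);
        FileTime lastAccess = attrs.lastAccessTime();
        System.out.println("last access time of " + path + ": " + lastAccess);
    }
}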
From source file:com.spectralogic.ds3client.metadata.MACMetadataRestore_Test.java
@Test
public void restoreFileTimes_Test() throws Exception {
    if (Platform.isMac()) {
        final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("dd-MM-yy:HH:mm");
        final BasicFileAttributes attr = Files.readAttributes(file.toPath(), BasicFileAttributes.class);
        final BasicHeader basicHeader[] = new BasicHeader[3];
        basicHeader[0] = new BasicHeader(
                MetadataKeyConstants.METADATA_PREFIX + MetadataKeyConstants.KEY_CREATION_TIME,
                String.valueOf(attr.creationTime().toMillis()));
        basicHeader[1] = new BasicHeader(
                MetadataKeyConstants.METADATA_PREFIX + MetadataKeyConstants.KEY_ACCESS_TIME,
                String.valueOf(attr.lastAccessTime().toMillis()));
        basicHeader[2] = new BasicHeader(
                MetadataKeyConstants.METADATA_PREFIX + MetadataKeyConstants.KEY_LAST_MODIFIED_TIME,
                String.valueOf(attr.lastModifiedTime().toMillis()));
        final Metadata metadata = genMetadata(basicHeader);
        final MACMetadataRestore macMetadataRestore = new MACMetadataRestore(metadata, file.getPath(),
                MetaDataUtil.getOS());
        macMetadataRestore.restoreFileTimes();
        final BasicFileAttributes fileAttributes = Files.readAttributes(file.toPath(), BasicFileAttributes.class);
        Assert.assertEquals(simpleDateFormat.format(fileAttributes.creationTime().toMillis()),
                simpleDateFormat.format(Long.valueOf(basicHeader[0].getValue())));
        Assert.assertEquals(simpleDateFormat.format(fileAttributes.lastModifiedTime().toMillis()),
                simpleDateFormat.format(Long.valueOf(basicHeader[2].getValue())));
    }
}
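Restoring timestamps, as the test above verifies, comes down to writing them back through BasicFileAttributeView.setTimes. The MACMetadataRestore internals are not shown in this listing, so the following is only a rough sketch of that idea using stored epoch-millisecond values (method and file names are placeholders):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributeView;
import java.nio.file.attribute.FileTime;

public class RestoreTimesSketch {
    // Restores the three timestamps from epoch-millisecond values, e.g. values
    // previously captured from BasicFileAttributes as in the test above.
    static void restoreTimes(Path path, long modifiedMillis, long accessMillis, long creationMillis)
            throws IOException {
        BasicFileAttributeView view = Files.getFileAttributeView(path, BasicFileAttributeView.class);
        view.setTimes(FileTime.fromMillis(modifiedMillis),
                FileTime.fromMillis(accessMillis),
                FileTime.fromMillis(creationMillis)); // creation time may be ignored on some platforms
    }

    public static void main(String[] args) throws IOException {
        Path path = Paths.get("example.txt"); // hypothetical file
        long now = System.currentTimeMillis();
        restoreTimes(path, now, now, now);
    }
}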
From source file:fr.inria.soctrace.tools.ocelotl.core.caches.DichotomyCache.java
/**
 * Remove a cache file. The eviction policy is to delete the file with the
 * oldest access time.
 */
public void removeCacheFile() {
    // Init with current time
    FileTime oldestDate = FileTime.fromMillis(System.currentTimeMillis());
    CacheParameters oldestParam = null;
    for (CacheParameters aCacheParam : cachedDichotomy.keySet()) {
        try {
            // Get the last access to the file
            Path path = cachedDichotomy.get(aCacheParam).toPath();
            BasicFileAttributes attrs = Files.readAttributes(path, BasicFileAttributes.class);
            FileTime currentTime = attrs.lastAccessTime();
            // If the access is older than the current oldest
            if (currentTime.compareTo(oldestDate) < 0) {
                oldestDate = currentTime;
                oldestParam = aCacheParam;
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    // Delete oldest accessed cache
    if (!cachedDichotomy.get(oldestParam).delete()) {
        logger.debug("[DICHOTOMY CACHE]: Deletion of cache file "
                + cachedDichotomy.get(oldestParam).getName() + " failed.");
    }
    cachedDichotomy.remove(oldestParam);
}
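The same "evict the least recently accessed file" selection can also be written with the stream API; a sketch assuming a hypothetical cache directory follows. Note that last access time is only as reliable as the underlying file system: many systems reduce or disable access-time updates (for example relatime/noatime mounts, or NTFS volumes with last-access updates turned off), in which case it may effectively track modification time.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileTime;
import java.util.Comparator;
import java.util.Optional;
import java.util.stream.Stream;

public class OldestAccessedFile {
    // Returns the last access time, falling back to the epoch on error so that
    // unreadable entries sort first and become eviction candidates.
    static FileTime lastAccess(Path p) {
        try {
            return Files.readAttributes(p, BasicFileAttributes.class).lastAccessTime();
        } catch (IOException e) {
            return FileTime.fromMillis(0L);
        }
    }

    public static void main(String[] args) throws IOException {
        Path cacheDir = Paths.get("cache"); // hypothetical cache directory
        try (Stream<Path> files = Files.list(cacheDir)) {
            Optional<Path> oldest = files.filter(Files::isRegularFile)
                    .min(Comparator.comparing(OldestAccessedFile::lastAccess));
            oldest.ifPresent(p -> System.out.println("evict candidate: " + p));
        }
    }
}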
From source file:de.tiqsolutions.hdfs.HadoopFileSystemProvider.java
@Override
public void copy(Path source, Path target, CopyOption... options) throws IOException {
    List<CopyOption> optionList = Arrays.asList(options);
    if (!optionList.contains(StandardCopyOption.REPLACE_EXISTING)) {
        if (Files.exists(target))
            throw new java.nio.file.FileAlreadyExistsException(source.toString(), target.toString(),
                    "could not copy file to destination");
    } else {
        Files.deleteIfExists(target);
    }
    FileSystem sourceFS = source.getFileSystem();
    FileSystem targetFS = target.getFileSystem();
    if (optionList.contains(HadoopCopyOption.REMOTE_COPY) && sourceFS.equals(targetFS)) {
        remoteCopy(source, target, options);
        return;
    }
    try (SeekableByteChannel sourceChannel = sourceFS.provider().newByteChannel(source,
            EnumSet.of(StandardOpenOption.READ))) {
        Set<StandardOpenOption> openOptions = EnumSet.of(StandardOpenOption.WRITE);
        if (optionList.contains(StandardCopyOption.REPLACE_EXISTING))
            openOptions.add(StandardOpenOption.CREATE);
        else
            openOptions.add(StandardOpenOption.CREATE_NEW);
        List<FileAttribute<?>> fileAttributes = new ArrayList<>();
        if (optionList.contains(StandardCopyOption.COPY_ATTRIBUTES)) {
            Set<String> sourceAttrViews = sourceFS.supportedFileAttributeViews();
            Set<String> targetAttrViews = targetFS.supportedFileAttributeViews();
            if (sourceAttrViews.contains(PosixFileAttributeViewImpl.NAME)
                    && targetAttrViews.contains(PosixFileAttributeViewImpl.NAME)) {
                PosixFileAttributes posixAttributes = sourceFS.provider().readAttributes(source,
                        PosixFileAttributes.class);
                fileAttributes.add(PosixFilePermissions.asFileAttribute(posixAttributes.permissions()));
            }
            if (sourceAttrViews.contains(HadoopFileAttributeViewImpl.NAME)
                    && targetAttrViews.contains(HadoopFileAttributeViewImpl.NAME)) {
                final HadoopFileAttributes hdfsAttributes = sourceFS.provider().readAttributes(source,
                        HadoopFileAttributes.class);
                fileAttributes.add(new FileAttribute<Long>() {
                    @Override
                    public String name() {
                        return HadoopFileAttributeViewImpl.NAME + ":blockSize";
                    }

                    @Override
                    public Long value() {
                        return hdfsAttributes.getBlockSize();
                    }
                });
                fileAttributes.add(new FileAttribute<Short>() {
                    @Override
                    public String name() {
                        return HadoopFileAttributeViewImpl.NAME + ":replication";
                    }

                    @Override
                    public Short value() {
                        return hdfsAttributes.getReplication();
                    }
                });
            }
        }
        FileAttribute<?>[] attributes = fileAttributes.toArray(new FileAttribute<?>[fileAttributes.size()]);
        try (SeekableByteChannel targetChannel = targetFS.provider().newByteChannel(target, openOptions,
                attributes)) {
            int buffSize = getConfiguration().getInt(DFSConfigKeys.DFS_STREAM_BUFFER_SIZE_KEY,
                    DFSConfigKeys.DFS_STREAM_BUFFER_SIZE_DEFAULT);
            ByteBuffer buffer = ByteBuffer.allocate(buffSize);
            buffer.clear();
            while (sourceChannel.read(buffer) > 0) {
                buffer.flip();
                targetChannel.write(buffer);
                buffer.clear();
            }
        }
        if (optionList.contains(StandardCopyOption.COPY_ATTRIBUTES)) {
            BasicFileAttributes attrs = sourceFS.provider().readAttributes(source, BasicFileAttributes.class);
            BasicFileAttributeView view = targetFS.provider().getFileAttributeView(target,
                    BasicFileAttributeView.class);
            view.setTimes(attrs.lastModifiedTime(), attrs.lastAccessTime(), attrs.creationTime());
        }
    }
}
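The explicit setTimes call at the end is what carries the access time across: StandardCopyOption.COPY_ATTRIBUTES only guarantees that the last-modified time is copied, while other timestamps are provider specific. A minimal sketch of the same pattern with the default file system provider (the paths are placeholders):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.BasicFileAttributeView;
import java.nio.file.attribute.BasicFileAttributes;

public class CopyWithTimes {
    public static void main(String[] args) throws IOException {
        Path source = Paths.get("source.txt"); // hypothetical paths
        Path target = Paths.get("target.txt");
        Files.copy(source, target, StandardCopyOption.REPLACE_EXISTING);
        // Explicitly propagate all three timestamps, including last access time.
        BasicFileAttributes attrs = Files.readAttributes(source, BasicFileAttributes.class);
        Files.getFileAttributeView(target, BasicFileAttributeView.class)
                .setTimes(attrs.lastModifiedTime(), attrs.lastAccessTime(), attrs.creationTime());
    }
}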
From source file:fr.inria.soctrace.tools.ocelotl.core.caches.DataCache.java
/**
 * Remove a cache file. The eviction policy is to delete the file with the
 * oldest access time.
 */
public void removeCacheFile() {
    // Init with current time
    FileTime oldestDate = FileTime.fromMillis(System.currentTimeMillis());
    CacheParameters oldestParam = null;
    for (CacheParameters aCacheParam : cachedData.keySet()) {
        try {
            // Get the last access to the file
            Path path = cachedData.get(aCacheParam).toPath();
            BasicFileAttributes attrs = Files.readAttributes(path, BasicFileAttributes.class);
            FileTime currentTime = attrs.lastAccessTime();
            // If the access is older than the current oldest
            if (currentTime.compareTo(oldestDate) < 0) {
                oldestDate = currentTime;
                oldestParam = aCacheParam;
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    // Delete oldest accessed cache
    if (!cachedData.get(oldestParam).delete()) {
        logger.debug("DataCache: Deletion of cache file "
                + cachedData.get(oldestParam).getName() + " failed.");
    }
    cachedData.remove(oldestParam);
}
From source file:de.tiqsolutions.hdfs.BasicFileAttributeViewImpl.java
Map<String, Object> readAttributes(String attributes) throws IOException {
    BasicFileAttributes attr = readAttributes();
    List<String> attrlist = Arrays.asList(attributes.split(","));
    boolean readall = attrlist.contains("*");
    Map<String, Object> ret = new HashMap<>();
    if (readall || attrlist.contains("fileKey"))
        ret.put("fileKey", attr.fileKey());
    if (readall || attrlist.contains("creationTime"))
        ret.put("creationTime", attr.creationTime());
    if (readall || attrlist.contains("isDirectory"))
        ret.put("isDirectory", attr.isDirectory());
    if (readall || attrlist.contains("isOther"))
        ret.put("isOther", attr.isOther());
    if (readall || attrlist.contains("isRegularFile"))
        ret.put("isRegularFile", attr.isRegularFile());
    if (readall || attrlist.contains("isSymbolicLink"))
        ret.put("isSymbolicLink", attr.isSymbolicLink());
    if (readall || attrlist.contains("lastAccessTime"))
        ret.put("lastAccessTime", attr.lastAccessTime());
    if (readall || attrlist.contains("lastModifiedTime"))
        ret.put("lastModifiedTime", attr.lastModifiedTime());
    if (readall || attrlist.contains("size"))
        ret.put("size", attr.size());
    return ret;
}
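For the built-in file systems, the same name-to-value map can be obtained directly from the String overload of Files.readAttributes; a small sketch (the file name is a placeholder):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;

public class AttributeMapExample {
    public static void main(String[] args) throws IOException {
        Path path = Paths.get("example.txt"); // hypothetical file
        // The "basic:" prefix is optional for the basic view; "*" selects all basic attributes.
        Map<String, Object> attrs = Files.readAttributes(path, "basic:lastAccessTime,lastModifiedTime,size");
        System.out.println("lastAccessTime = " + attrs.get("lastAccessTime"));
        System.out.println("lastModifiedTime = " + attrs.get("lastModifiedTime"));
        System.out.println("size = " + attrs.get("size"));
    }
}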
From source file:edu.ku.brc.util.FileCache.java
/**
 * Returns the last access time of the item cached under the given key, in
 * milliseconds since January 1, 1970 UTC, as reported by the file system.
 * If the attributes cannot be read, the current time is returned.
 *
 * @param key the key for the cached item
 * @return the last time the item was accessed
 */
public synchronized long getLastAccessTime(final String key) {
    try {
        String filename = (String) handleToFilenameHash.get(key);
        if (filename != null) {
            File file = new File(filename);
            Path p = Paths.get(file.getAbsoluteFile().toURI());
            BasicFileAttributes attributes = Files.getFileAttributeView(p, BasicFileAttributeView.class)
                    .readAttributes();
            return attributes.lastAccessTime().toMillis();
        }
    } catch (IOException ex) {
        // fall through and return the current time
    }
    return Calendar.getInstance().getTimeInMillis();
}
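FileTime converts directly to the common time representations, which is what the toMillis() call above relies on; a small sketch of the conversions (the file name is a placeholder):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileTime;
import java.time.Instant;

public class FileTimeConversions {
    public static void main(String[] args) throws IOException {
        Path path = Paths.get("example.txt"); // hypothetical file
        FileTime accessTime = Files.readAttributes(path, BasicFileAttributes.class).lastAccessTime();
        long millis = accessTime.toMillis();      // milliseconds since the epoch
        Instant instant = accessTime.toInstant(); // java.time view of the same timestamp
        System.out.println(millis + " -> " + instant);
    }
}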
From source file:com.upplication.s3fs.util.AmazonS3ClientMock.java
private S3Element parse(Path elem, Path bucket) throws IOException {
    S3Object object = new S3Object();
    String bucketName = bucket.getFileName().toString();
    object.setBucketName(bucketName);

    String key = bucket.relativize(elem).toString().replaceAll("%2F", "/");
    boolean dir = key.endsWith("/") || key.isEmpty();
    object.setKey(key);

    ObjectMetadata metadata = new ObjectMetadata();
    BasicFileAttributes attr = Files.readAttributes(elem, BasicFileAttributes.class);
    metadata.setLastModified(new Date(attr.lastAccessTime().toMillis()));
    if (dir) {
        metadata.setContentLength(0);
        object.setObjectContent(null);
    } else {
        metadata.setContentLength(attr.size());
        object.setObjectContent(new ByteArrayInputStream(Files.readAllBytes(elem)));
    }
    object.setObjectMetadata(metadata);
    AccessControlList permission = createAclPermission(elem, bucketName);
    return new S3Element(object, permission, dir);
}
From source file:gobblin.example.simplejson.SimpleJsonSource.java
@Override
public List<WorkUnit> getWorkunits(SourceState state) {
    List<WorkUnit> workUnits = Lists.newArrayList();

    if (!state.contains(ConfigurationKeys.SOURCE_FILEBASED_FILES_TO_PULL)) {
        return workUnits;
    }

    // Create a single snapshot-type extract for all files
    Extract extract = new Extract(state, Extract.TableType.SNAPSHOT_ONLY,
            state.getProp(ConfigurationKeys.EXTRACT_NAMESPACE_NAME_KEY, "ExampleNamespace"), "ExampleTable");

    String filesToPull = state.getProp(ConfigurationKeys.SOURCE_FILEBASED_FILES_TO_PULL);

    File tempFileDir = new File("test_temp/"); // TODO: Delete the dir after completion.
    tempFileDir.mkdir();
    String tempFileDirAbsolute = "";
    try {
        tempFileDirAbsolute = tempFileDir.getCanonicalPath(); // Retrieve absolute path of temp folder
    } catch (IOException e) {
        e.printStackTrace();
    }

    int nameCount = 0;
    int csvCount = 0;

    for (String file : Splitter.on(',').omitEmptyStrings().split(filesToPull)) {
        Iterator it = FileUtils.iterateFiles(new File(file), null, true);
        while (it.hasNext()) {
            try {
                File newFile = (File) it.next();
                String basePath = newFile.getCanonicalPath(); // Retrieve absolute path of source
                Path path = newFile.toPath();

                // Call to REST API: provide the file basePath
                String extension = "";
                System.out.println("basePath is " + basePath);
                int i = basePath.lastIndexOf('.');
                if (i > 0) {
                    extension = basePath.substring(i + 1);
                }

                String url_file_name = "";
                int j = basePath.lastIndexOf('/');
                if (j > 0) {
                    url_file_name = basePath.substring(j + 1);
                }

                // Hand off to the REST API
                if (extension.equals("csv")) {
                    csvCount += 1;
                    // Include basePath, filename, and the location to store the file.
                    // 10.0.2.2 is localhost from vagrant.
                    // Insert the nameCount so that it can be joined back later.
                    System.out.println(
                            "curl http://localhost:8080" + basePath + "/" + Integer.toString(nameCount));
                    Process p = Runtime.getRuntime()
                            .exec("curl http://localhost:8080" + basePath + "/" + Integer.toString(nameCount));
                    // String myUrl = "http://localhost:8080/parse" + basePath + "&" + url_file_name + "&" + tempFileDirAbsolute;
                    // try {
                    //     URL url = new URL(myUrl);
                    //     HttpURLConnection connection = (HttpURLConnection) url.openConnection();
                    //     connection.setRequestMethod("GET");
                    //     connection.connect();
                    // } catch (Exception e) {
                    //     e.printStackTrace();
                    // }
                }

                // Print the filename and read the associated file metadata
                System.out.println(basePath);
                BasicFileAttributes attr = Files.readAttributes(path, BasicFileAttributes.class);

                // Create an intermediate JSON record describing the file
                JSONObject intermediate = new JSONObject();
                intermediate.put("filename", basePath);
                intermediate.put("timestamp", String.valueOf((new Date()).getTime()));
                intermediate.put("namespace", getMacAddress());
                intermediate.put("creationTime", String.valueOf(attr.creationTime()));
                intermediate.put("lastAccessTime", String.valueOf(attr.lastAccessTime()));
                intermediate.put("lastModifiedTime", String.valueOf(attr.lastModifiedTime()));
                intermediate.put("isDirectory", String.valueOf(attr.isDirectory()));
                intermediate.put("isOther", String.valueOf(attr.isOther()));
                intermediate.put("isRegularFile", String.valueOf(attr.isRegularFile()));
                intermediate.put("isSymbolicLink", String.valueOf(attr.isSymbolicLink()));
                intermediate.put("size", attr.size());

                // Create intermediate temp file
                nameCount += 1;
                String intermediateName = "/generated" + String.valueOf(nameCount) + ".json";
                String finalName = tempFileDirAbsolute + intermediateName;
                FileWriter generated = new FileWriter(finalName);
                generated.write(intermediate.toJSONString());
                generated.flush();
                generated.close();

                // Create one work unit for each file to pull
                WorkUnit workUnit = new WorkUnit(state, extract);
                workUnit.setProp(SOURCE_FILE_KEY, finalName);
                workUnits.add(workUnit);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        // Write out the number of files found to temp files
        try {
            FileWriter numCsvFiles = new FileWriter(tempFileDirAbsolute + "/numCsvFiles.txt");
            numCsvFiles.write("" + csvCount);
            numCsvFiles.flush();
            numCsvFiles.close();

            FileWriter numFiles = new FileWriter(tempFileDirAbsolute + "/numFiles.txt");
            numFiles.write("" + nameCount);
            numFiles.flush();
            numFiles.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    return workUnits;
}
From source file:net.mozq.picto.core.ProcessCore.java
public static void findFiles(ProcessCondition processCondition, Consumer<ProcessData> processDataSetter,
        BooleanSupplier processStopper) throws IOException {

    Set<FileVisitOption> fileVisitOptionSet;
    if (processCondition.isFollowLinks()) {
        fileVisitOptionSet = EnumSet.of(FileVisitOption.FOLLOW_LINKS);
    } else {
        fileVisitOptionSet = Collections.emptySet();
    }

    Files.walkFileTree(processCondition.getSrcRootPath(), fileVisitOptionSet, processCondition.getDept(),
            new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
                    if (processStopper.getAsBoolean()) {
                        return FileVisitResult.TERMINATE;
                    }
                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                    if (attrs.isDirectory()) {
                        return FileVisitResult.SKIP_SUBTREE;
                    }
                    if (processStopper.getAsBoolean()) {
                        return FileVisitResult.TERMINATE;
                    }
                    if (!processCondition.getPathFilter().accept(file, attrs)) {
                        return FileVisitResult.SKIP_SUBTREE;
                    }

                    Path rootRelativeSubPath = processCondition.getSrcRootPath().relativize(file.getParent());

                    ImageMetadata imageMetadata = getImageMetadata(file);

                    Date baseDate;
                    if (processCondition.isChangeFileCreationDate() || processCondition.isChangeFileModifiedDate()
                            || processCondition.isChangeFileAccessDate() || processCondition.isChangeExifDate()) {
                        baseDate = getBaseDate(processCondition, file, attrs, imageMetadata);
                    } else {
                        baseDate = null;
                    }

                    String destSubPathname = processCondition.getDestSubPathFormat().format(varName -> {
                        try {
                            switch (varName) {
                            case "Now":
                                return new Date();
                            case "ParentSubPath":
                                return rootRelativeSubPath.toString();
                            case "FileName":
                                return file.getFileName().toString();
                            case "BaseName":
                                return FileUtilz.getBaseName(file.getFileName().toString());
                            case "Extension":
                                return FileUtilz.getExt(file.getFileName().toString());
                            case "Size":
                                return Long.valueOf(Files.size(file));
                            case "CreationDate":
                                return (processCondition.isChangeFileCreationDate()) ? baseDate
                                        : new Date(attrs.creationTime().toMillis());
                            case "ModifiedDate":
                                return (processCondition.isChangeFileModifiedDate()) ? baseDate
                                        : new Date(attrs.lastModifiedTime().toMillis());
                            case "AccessDate":
                                return (processCondition.isChangeFileAccessDate()) ? baseDate
                                        : new Date(attrs.lastAccessTime().toMillis());
                            case "PhotoTakenDate":
                                return (processCondition.isChangeExifDate()) ? baseDate
                                        : getPhotoTakenDate(file, imageMetadata);
                            case "Width":
                                return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXIF_IMAGE_WIDTH);
                            case "Height":
                                return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXIF_IMAGE_LENGTH);
                            case "FNumber":
                                return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_FNUMBER);
                            case "Aperture":
                                return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_APERTURE_VALUE);
                            case "MaxAperture":
                                return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_MAX_APERTURE_VALUE);
                            case "ISO":
                                return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_ISO);
                            case "FocalLength":
                                return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_FOCAL_LENGTH);
                            case "FocalLength35mm":
                                return getEXIFDoubleValue(imageMetadata,
                                        ExifTagConstants.EXIF_TAG_FOCAL_LENGTH_IN_35MM_FORMAT);
                            case "ShutterSpeed":
                                return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_SHUTTER_SPEED_VALUE);
                            case "Exposure":
                                return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXPOSURE);
                            // case "ExposureTime": return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXPOSURE_TIME);
                            // case "ExposureMode": return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXPOSURE_MODE);
                            case "ExposureProgram":
                                return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_EXPOSURE_PROGRAM);
                            case "Brightness":
                                return getEXIFDoubleValue(imageMetadata, ExifTagConstants.EXIF_TAG_BRIGHTNESS_VALUE);
                            case "WhiteBalance":
                                return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_WHITE_BALANCE_1);
                            case "LightSource":
                                return getEXIFIntValue(imageMetadata, ExifTagConstants.EXIF_TAG_LIGHT_SOURCE);
                            case "Lens":
                                return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_LENS);
                            case "LensMake":
                                return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_LENS_MAKE);
                            case "LensModel":
                                return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_LENS_MODEL);
                            case "LensSerialNumber":
                                return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_LENS_SERIAL_NUMBER);
                            case "Make":
                                return getEXIFStringValue(imageMetadata, TiffTagConstants.TIFF_TAG_MAKE);
                            case "Model":
                                return getEXIFStringValue(imageMetadata, TiffTagConstants.TIFF_TAG_MODEL);
                            case "SerialNumber":
                                return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_SERIAL_NUMBER);
                            case "Software":
                                return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_SOFTWARE);
                            case "ProcessingSoftware":
                                return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_PROCESSING_SOFTWARE);
                            case "OwnerName":
                                return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_OWNER_NAME);
                            case "CameraOwnerName":
                                return getEXIFStringValue(imageMetadata, ExifTagConstants.EXIF_TAG_CAMERA_OWNER_NAME);
                            case "GPSLat":
                                return getEXIFGpsLat(imageMetadata);
                            case "GPSLatDeg":
                                return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LATITUDE, 0);
                            case "GPSLatMin":
                                return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LATITUDE, 1);
                            case "GPSLatSec":
                                return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LATITUDE, 2);
                            case "GPSLatRef":
                                return getEXIFStringValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LATITUDE_REF);
                            case "GPSLon":
                                return getEXIFGpsLon(imageMetadata);
                            case "GPSLonDeg":
                                return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LONGITUDE, 0);
                            case "GPSLonMin":
                                return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LONGITUDE, 1);
                            case "GPSLonSec":
                                return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LONGITUDE, 2);
                            case "GPSLonRef":
                                return getEXIFStringValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_LONGITUDE_REF);
                            case "GPSAlt":
                                return getEXIFDoubleValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_ALTITUDE);
                            case "GPSAltRef":
                                return getEXIFIntValue(imageMetadata, GpsTagConstants.GPS_TAG_GPS_ALTITUDE_REF);
                            default:
                                throw new PictoInvalidDestinationPathException(
                                        Messages.getString("message.warn.invalid.destSubPath.varName", varName));
                            }
                        } catch (PictoException e) {
                            throw e;
                        } catch (Exception e) {
                            throw new PictoInvalidDestinationPathException(
                                    Messages.getString("message.warn.invalid.destSubPath.pattern"), e);
                        }
                    });

                    Path destSubPath = processCondition.getDestRootPath().resolve(destSubPathname).normalize();
                    if (!destSubPath.startsWith(processCondition.getDestRootPath())) {
                        throw new PictoInvalidDestinationPathException(
                                Messages.getString("message.warn.invalid.destination.path", destSubPath));
                    }

                    ProcessData processData = new ProcessData();
                    processData.setSrcPath(file);
                    processData.setSrcFileAttributes(attrs);
                    processData.setDestPath(destSubPath);
                    processData.setBaseDate(baseDate);

                    processDataSetter.accept(processData);

                    return FileVisitResult.CONTINUE;
                }
            });
}
From source file:oracle.kv.sample.fileloader.FileLoader.java
/**
 * After successful validation this method is run to load the content of a
 * TXT file into the given table.
 *
 * @throws IOException
 */
private void loadData() throws IOException {
    if (inputPathStr != null) {
        dir = new File(inputPathStr);
        File file = null;
        File[] files = null;
        int len = 0;

        // If the input path is a directory then load data from all the files
        // under that directory. Make sure all the files are of the same type,
        // i.e. TXT (mix and match is not allowed).
        if (dir.exists()) {
            System.out.println("There are " + dir.listFiles().length + " files to be loaded.");
            if (dir.isDirectory()) {
                files = dir.listFiles();
                len = files.length;

                // Loop through all the files and load the content one by one
                for (int i = 0; i < len; i++) {
                    file = files[i];
                    try {
                        Path filePath = Paths.get(file.getPath());
                        BasicFileAttributes attr = Files.readAttributes(filePath, BasicFileAttributes.class,
                                LinkOption.NOFOLLOW_LINKS);
                        DateFormat formatter = new SimpleDateFormat("MM/dd/yyyy");
                        FileOwnerAttributeView fileOwnerAttributeView = Files.getFileAttributeView(filePath,
                                FileOwnerAttributeView.class);
                        UserPrincipal userPrincipal = fileOwnerAttributeView.getOwner();

                        id = Integer.toString(i);
                        fileDate = formatter.format(attr.lastAccessTime().toMillis());
                        fileOwner = userPrincipal.getName();
                        binaryFile = FileUtils.readFileToByteArray(file);

                        row = table.createRow();
                        row.put("id", id);
                        row.put("date", fileDate.toString());
                        row.put("owner", fileOwner);
                        row.put("file", binaryFile);
                        tableh.put(row, null, null);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            } else {
                System.out.println("There are no Files to Load");
            }
        }
    }

    if (fileId != null) {
        getData();
    }
}
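The example above formats the access time through SimpleDateFormat; with java.time the FileTime can be formatted without going through Date. A small sketch (the pattern and file name are placeholders):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

public class FormatAccessTime {
    public static void main(String[] args) throws IOException {
        Path path = Paths.get("example.txt"); // hypothetical file
        BasicFileAttributes attr = Files.readAttributes(path, BasicFileAttributes.class);
        // A zone is required to turn the Instant into calendar fields.
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("MM/dd/yyyy")
                .withZone(ZoneId.systemDefault());
        System.out.println(formatter.format(attr.lastAccessTime().toInstant()));
    }
}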