Usage examples for the org.apache.commons.vfs2 FileObject#getName() method.
/**
 * Returns the name of this file object.
 * <p>
 * The returned {@link FileName} exposes path components used throughout these
 * examples, e.g. {@code getPath()} and {@code getBaseName()}.
 */
FileName getName();
From source file: org.pentaho.hadoop.shim.hsp101.HadoopShim.java
@Override public void onLoad(HadoopConfiguration config, HadoopConfigurationFileSystemManager fsm) throws Exception { fsm.addProvider(config, "hdfs", config.getIdentifier(), new HDFSFileProvider()); setDistributedCacheUtil(new DistributedCacheUtilImpl(config) { /**/*from w ww. ja va2 s .c om*/ * Default permission for cached files * <p/> * Not using FsPermission.createImmutable due to EOFExceptions when using it with Hadoop 0.20.2 */ private final FsPermission CACHED_FILE_PERMISSION = new FsPermission((short) 0755); public void addFileToClassPath(Path file, Configuration conf) throws IOException { String classpath = conf.get("mapred.job.classpath.files"); conf.set("mapred.job.classpath.files", classpath == null ? file.toString() : classpath + getClusterPathSeparator() + file.toString()); FileSystem fs = FileSystem.get(conf); URI uri = fs.makeQualified(file).toUri(); DistributedCache.addCacheFile(uri, conf); } /** * Stages the source file or folder to a Hadoop file system and sets their permission and replication * value appropriately to be used with the Distributed Cache. WARNING: This will delete the contents of * dest before staging the archive. * * @param source File or folder to copy to the file system. If it is a folder all contents will be * copied into dest. * @param fs Hadoop file system to store the contents of the archive in * @param dest Destination to copy source into. If source is a file, the new file name will be * exactly dest. If source is a folder its contents will be copied into dest. For more * info see {@link FileSystem#copyFromLocalFile(org.apache.hadoop.fs.Path, * org.apache.hadoop.fs.Path)}. * @param overwrite Should an existing file or folder be overwritten? If not an exception will be * thrown. * @throws IOException Destination exists is not a directory * @throws KettleFileException Source does not exist or destination exists and overwrite is false. 
*/ public void stageForCache(FileObject source, FileSystem fs, Path dest, boolean overwrite) throws IOException, KettleFileException { if (!source.exists()) { throw new KettleFileException(BaseMessages.getString(DistributedCacheUtilImpl.class, "DistributedCacheUtil.SourceDoesNotExist", source)); } if (fs.exists(dest)) { if (overwrite) { // It is a directory, clear it out fs.delete(dest, true); } else { throw new KettleFileException(BaseMessages.getString(DistributedCacheUtilImpl.class, "DistributedCacheUtil.DestinationExists", dest.toUri().getPath())); } } // Use the same replication we'd use for submitting jobs short replication = (short) fs.getConf().getInt("mapred.submit.replication", 10); copyFile(source, fs, dest, overwrite); fs.setReplication(dest, replication); } private void copyFile(FileObject source, FileSystem fs, Path dest, boolean overwrite) throws IOException { if (source.getType() == FileType.FOLDER) { fs.mkdirs(dest); fs.setPermission(dest, CACHED_FILE_PERMISSION); for (FileObject fileObject : source.getChildren()) { copyFile(fileObject, fs, new Path(dest, fileObject.getName().getBaseName()), overwrite); } } else { try (FSDataOutputStream fsDataOutputStream = fs.create(dest, overwrite)) { IOUtils.copy(source.getContent().getInputStream(), fsDataOutputStream); fs.setPermission(dest, CACHED_FILE_PERMISSION); } } } public String getClusterPathSeparator() { return System.getProperty("hadoop.cluster.path.separator", ","); } }); }
From source file: org.pentaho.metaverse.analyzer.kettle.extensionpoints.job.entry.JobEntryExternalResourceListenerTest.java
@Test public void testBeforeAfterExecution() throws Exception { IJobEntryExternalResourceConsumer consumer = mock(IJobEntryExternalResourceConsumer.class); JobMeta mockJobMeta = mock(JobMeta.class); Job job = mock(Job.class); when(job.getJobMeta()).thenReturn(mockJobMeta); JobEntryInterface jobEntryInterface = mock(JobEntryInterface.class); when(jobEntryInterface.getParentJob()).thenReturn(job); when(jobEntryInterface.getResourceDependencies(mockJobMeta)) .thenReturn(Collections.singletonList(new ResourceReference(null, Collections.singletonList(new ResourceEntry("myFile", ResourceEntry.ResourceType.FILE))))); JobEntryCopy jobEntryCopy = mock(JobEntryCopy.class); IExecutionProfile executionProfile = mock(IExecutionProfile.class); IExecutionData executionData = mock(IExecutionData.class); when(executionProfile.getExecutionData()).thenReturn(executionData); JobLineageHolderMap.getInstance().getLineageHolder(job).setExecutionProfile(executionProfile); JobEntryExternalResourceListener listener = new JobEntryExternalResourceListener(consumer); FileObject mockFile = mock(FileObject.class); FileName mockFilename = mock(FileName.class); when(mockFilename.getPath()).thenReturn("/path/to/file"); when(mockFile.getName()).thenReturn(mockFilename); ResultFile resultFile = mock(ResultFile.class); when(resultFile.getFile()).thenReturn(mockFile); List<ResultFile> resultFiles = Collections.singletonList(resultFile); Result result = mock(Result.class); when(result.getResultFilesList()).thenReturn(resultFiles); // Call beforeExecution for coverage listener.beforeExecution(null, null, null); listener.afterExecution(job, jobEntryCopy, jobEntryInterface, result); }
From source file: org.pentaho.metaverse.impl.VfsLineageCollector.java
@Override public List<String> listArtifacts(final String startingDate, final String endingDate) throws IllegalArgumentException { List<String> paths = new ArrayList<>(); try {//from w w w.j ava 2 s. c o m FileSystemOptions opts = new FileSystemOptions(); FileObject lineageRootFolder = KettleVFS.getFileObject(getOutputFolder(), opts); FileSelector dateRangeFilter = new VfsDateRangeFilter(format, startingDate, endingDate); FileSelector depthFilter = new FileDepthSelector(1, 256); if (lineageRootFolder.exists() && lineageRootFolder.getType() == FileType.FOLDER) { // get the folders that come on or after the startingDate FileObject[] dayFolders = lineageRootFolder.findFiles(dateRangeFilter); for (FileObject dayFolder : dayFolders) { FileObject[] listThisFolder = dayFolder.findFiles(depthFilter); for (FileObject currentFile : listThisFolder) { if (currentFile.getType() == FileType.FILE) { paths.add(currentFile.getName().getPath()); } } } } return paths; } catch (Exception e) { throw new IllegalArgumentException(e); } }
From source file: org.pentaho.metaverse.impl.VfsLineageCollector.java
@Override public List<String> listArtifactsForFile(String pathToArtifact, String startingDate, String endingDate) throws IllegalArgumentException { List<String> paths = new ArrayList<>(); try {//from ww w .j av a2 s. c o m FileSystemOptions opts = new FileSystemOptions(); FileObject lineageRootFolder = KettleVFS.getFileObject(getOutputFolder(), opts); FileSelector dateRangeFilter = new VfsDateRangeFilter(format, startingDate, endingDate); FileSelector depthFilter = new FileDepthSelector(1, 256); if (lineageRootFolder.exists() && lineageRootFolder.getType() == FileType.FOLDER) { // get all of the date folders of lineage we have FileObject[] dayFolders = lineageRootFolder.findFiles(dateRangeFilter); for (FileObject dayFolder : dayFolders) { FileObject[] listThisFolder = dayFolder.findFiles(depthFilter); for (FileObject currentFile : listThisFolder) { FileObject requested = currentFile.resolveFile(pathToArtifact); if (requested.exists() && requested.getType() == FileType.FOLDER) { FileObject[] requestedChildren = requested.getChildren(); for (FileObject requestedChild : requestedChildren) { if (requestedChild.getType() == FileType.FILE) { paths.add(requestedChild.getName().getPath()); } } } } } } return paths; } catch (Exception e) { throw new IllegalArgumentException(e); } }
From source file: org.pentaho.metaverse.impl.VfsLineageCollector.java
@Override public void compressArtifacts(List<String> paths, OutputStream os) { ZipOutputStream zos = null;// w w w.j a va 2s . c o m try { FileSystemOptions opts = new FileSystemOptions(); zos = new ZipOutputStream(os); for (String path : paths) { FileObject file = KettleVFS.getFileObject(path, opts); try { // register the file as an entry in the zip file ZipEntry zipEntry = new ZipEntry(file.getName().getPath()); zos.putNextEntry(zipEntry); // write the file's bytes to the zip stream try (InputStream fis = file.getContent().getInputStream()) { zos.write(IOUtils.toByteArray(fis)); } } catch (IOException e) { log.error(Messages.getString("ERROR.FailedAddingFileToZip", file.getName().getPath())); } finally { // indicate we are done with this file try { zos.closeEntry(); } catch (IOException e) { log.error(Messages.getString("ERROR.FailedToProperlyCloseZipEntry", file.getName().getPath())); } } } } catch (KettleFileException e) { log.error(Messages.getString("ERROR.UnexpectedVfsError", e.getMessage())); } finally { IOUtils.closeQuietly(zos); } }
From source file: org.pentaho.metaverse.impl.VfsLineageWriter.java
protected FileObject getOutputDirectoryAsFile(LineageHolder holder) { try {/*from w w w. ja v a2 s . com*/ FileObject dateRootFolder = getDateFolder(holder); dateRootFolder.createFolder(); String id = holder.getId() == null ? "unknown_artifact" : holder.getId(); if (id.startsWith(File.separator)) { // For *nix id = id.substring(1); } else if (Const.isWindows() && id.charAt(1) == ':') { // For windows id = id.replaceFirst(Pattern.quote(":"), ""); } try { FileObject folder = dateRootFolder.resolveFile(id); folder.createFolder(); if (folder.isFile()) { // must be a folder throw new IllegalStateException( Messages.getErrorString("ERROR.OutputFolderWrongType", folder.getName().getPath())); } return folder; } catch (Exception e) { log.error(Messages.getErrorString("ERROR.CouldNotCreateFile"), e); return null; } } catch (Exception e) { log.error(Messages.getErrorString("ERROR.CouldNotCreateFile"), e); throw new IllegalStateException(e); } }
From source file: org.pentaho.metaverse.impl.VfsLineageWriter.java
/** * Method called on the writer to do any cleanup of the output artifacts, folders, etc. *///from w w w . j av a 2s .co m @Override public void cleanOutput(LineageHolder holder) { String folderName = "unknown"; try { FileObject folder = getOutputDirectoryAsFile(holder); folderName = folder.getName().getPath(); folder.deleteAll(); } catch (IOException ioe) { log.error(Messages.getErrorString("ERROR.CouldNotDeleteFile", folderName), ioe); } }
From source file: org.pentaho.metaverse.impl.VfsLineageWriterTest.java
/**
 * Verifies getDateFolder handles a null holder, returns a path ending in the
 * formatted current date, and respects a changed output folder.
 */
@Test
public void testGetDateFolder() throws KettleFileException, FileSystemException {
    assertNotNull(writer.getDateFolder(null));

    FileObject dateFolder = writer.getDateFolder(holder);
    assertNotNull(dateFolder);
    assertTrue(dateFolder.getName().getPath().endsWith(VfsLineageWriter.dateFolderFormat.format(now)));

    // The date folder must nest under the configured output folder.
    writer.setOutputFolder("file://root");
    dateFolder = writer.getDateFolder(holder);
    assertTrue(
            dateFolder.getName().getPath().endsWith("root" + "/" + VfsLineageWriter.dateFolderFormat.format(now)));
}
From source file: org.pentaho.metaverse.util.VfsDateRangeFilterTest.java
/**
 * With only a start date configured, the filter must accept folders dated on or
 * after the start date (inclusive) and reject earlier ones.
 */
@Test
public void testAccept_startDateSet() throws Exception {
    filter = new VfsDateRangeFilter(format, start);

    // Mock a FOLDER at depth 1 whose base name is the candidate date string.
    FileName fn = mock(FileName.class);
    when(fn.getBaseName()).thenReturn(end);
    FileObject fo = mock(FileObject.class);
    when(fo.getType()).thenReturn(FileType.FOLDER);
    when(fo.getName()).thenReturn(fn);
    FileSelectInfo fsi = mock(FileSelectInfo.class);
    when(fsi.getFile()).thenReturn(fo);
    when(fsi.getDepth()).thenReturn(1);

    assertTrue(filter.includeFile(fsi));

    when(fn.getBaseName()).thenReturn(start);
    assertTrue("Start date is not inclusive", filter.includeFile(fsi));

    when(fn.getBaseName()).thenReturn("20000101");
    assertFalse("Before start date was accepted", filter.includeFile(fsi));
}
From source file: org.pentaho.metaverse.util.VfsDateRangeFilterTest.java
/**
 * With both start and end dates configured, the filter must accept dates on the
 * boundaries and between them, and reject dates outside the range.
 */
@Test
public void testAccept_startDateSet_endDateSet() throws Exception {
    filter = new VfsDateRangeFilter(format, start, end);

    // Mock a FOLDER at depth 1 whose base name is the candidate date string.
    FileName fn = mock(FileName.class);
    when(fn.getBaseName()).thenReturn(end);
    FileObject fo = mock(FileObject.class);
    when(fo.getType()).thenReturn(FileType.FOLDER);
    when(fo.getName()).thenReturn(fn);
    FileSelectInfo fsi = mock(FileSelectInfo.class);
    when(fsi.getFile()).thenReturn(fo);
    when(fsi.getDepth()).thenReturn(1);

    assertTrue("End date is not inclusive", filter.includeFile(fsi));

    when(fn.getBaseName()).thenReturn(start);
    assertTrue("Start date is not inclusive", filter.includeFile(fsi));

    when(fn.getBaseName()).thenReturn(between);
    assertTrue("Between start and end date is not accepted", filter.includeFile(fsi));

    when(fn.getBaseName()).thenReturn("20000101");
    assertFalse("Before start date was accepted", filter.includeFile(fsi));

    when(fn.getBaseName()).thenReturn("21000101");
    assertFalse("After end date was accepted", filter.includeFile(fsi));
}