List of usage examples for org.apache.hadoop.fs Path toUri
public URI toUri()
From source file:com.splunk.shuttl.archiver.filesystem.hadoop.HadoopArchiveFileSystemSlowTest.java
License:Apache License
@Test(groups = { "end-to-end" }) @Parameters(value = { "hadoop.host", "hadoop.port" }) public void rename_dirWithMultipleLevelsOfNonExistingFiles_renamesDirectory(String hadoopHost, String hadoopPort) throws IOException { FileSystem hadoopFileSystem = TUtilsFunctional.getHadoopFileSystem(hadoopHost, hadoopPort); String simpleClassName = getClass().getSimpleName(); Path path = new Path(simpleClassName + "/1/foo/dir/").makeQualified(hadoopFileSystem); Path otherRoot = new Path(simpleClassName + "/2/foo/dir").makeQualified(hadoopFileSystem); HadoopArchiveFileSystem realFileStructure = new HadoopArchiveFileSystem(hadoopFileSystem); try {//from w w w . j av a 2s . c om hadoopFileSystem.mkdirs(path); assertTrue(hadoopFileSystem.exists(path)); hadoopFileSystem.delete(otherRoot, true); assertFalse(hadoopFileSystem.exists(otherRoot)); // Test realFileStructure.rename(path.toUri().getPath(), otherRoot.toUri().getPath()); assertTrue(hadoopFileSystem.exists(otherRoot)); assertFalse(hadoopFileSystem.exists(path)); } finally { hadoopFileSystem.delete(new Path("/1"), true); hadoopFileSystem.delete(new Path("/2"), true); } }
From source file:com.splunk.shuttl.archiver.filesystem.HadoopFileSystemArchive.java
License:Apache License
/**
 * Do NOT call nor override this method outside this class. It's meant to be
 * private but is package private for testing purposes. If you want to expose
 * this method make it public or protected!
 *
 * Copies the given local file into the atomic-put tmp directory on Hadoop,
 * first recursively deleting whatever is already at the target tmp path.
 * The tmp directory is the base; the full Hadoop path is formed by appending
 * a path derived from the supplied URI.
 *
 * @param fileOnLocalFileSystem local file to copy into the tmp directory
 * @param appendPathToTmpDirectory URI whose path is appended under the tmp base
 * @return the Hadoop path the file was put to
 * @throws IOException if the freshly-cleared tmp path still reports an
 *         existing file on put (should never happen after the delete)
 */
/* package private */Path putFileToTmpDirectoryOverwirtingOldFilesAppendingPath(
        File fileOnLocalFileSystem, URI appendPathToTmpDirectory)
        throws FileNotFoundException, IOException {
    Path tmpTarget = UtilsPath.createPathByAppending(atomicPutTmpPath,
            createPathFromURI(appendPathToTmpDirectory));
    // Clear the target first so the put below cannot collide with stale data.
    deletePathRecursivly(tmpTarget);
    try {
        putFile(fileOnLocalFileSystem, tmpTarget.toUri());
    } catch (FileOverwriteException e) {
        throw new IOException("The old tmp path was not deleted this shouldn't happen!", e);
    }
    return tmpTarget;
}
From source file:com.splunk.shuttl.archiver.filesystem.HadoopFileSystemArchiveTest.java
License:Apache License
public void getFile_validInput_fileShouldBeRetrived() throws IOException { File testFile = TUtilsFile.createFileWithRandomContent(); hadoopFileSystemPutter.putFile(testFile); Path hadoopPath = hadoopFileSystemPutter.getPathForFile(testFile); URI fileSystemPath = hadoopPath.toUri(); File retrivedFile = TUtilsFile.createFilePath(); // Test/* w w w. ja v a 2 s. c o m*/ hadoopFileSystemArchive.getFile(retrivedFile, fileSystemPath); // Confirm TUtilsTestNG.assertFileContentsEqual(testFile, retrivedFile); }
From source file:com.splunk.shuttl.archiver.filesystem.HadoopFileSystemArchiveTest.java
License:Apache License
@Test(expectedExceptions = FileOverwriteException.class) public void getFile_whenLocalFileAllreadyExist_fileOverwriteException() throws IOException, URISyntaxException { File testFile = TUtilsFile.createFileWithRandomContent(); hadoopFileSystemPutter.putFile(testFile); Path hadoopPath = hadoopFileSystemPutter.getPathForFile(testFile); URI fileSystemPath = hadoopPath.toUri(); File retrivedFile = TUtilsFile.createFileWithRandomContent(); // Test//from w ww . j a v a 2s . c o m hadoopFileSystemArchive.getFile(retrivedFile, fileSystemPath); }
From source file:com.splunk.shuttl.archiver.filesystem.HadoopFileSystemArchiveTest.java
License:Apache License
public void getFile_whenLocalFileAllreadyExist_localFileIsNotOverwritten() throws IOException, URISyntaxException { File testFile = TUtilsFile.createFileWithRandomContent(); hadoopFileSystemPutter.putFile(testFile); Path hadoopPath = hadoopFileSystemPutter.getPathForFile(testFile); URI fileSystemPath = hadoopPath.toUri(); File fileThatCouldBeOverwritten = TUtilsFile.createFileWithRandomContent(); File originalFile = TUtilsFile.createFileWithContentsOfFile(fileThatCouldBeOverwritten); try {/*www.j a v a2s .c o m*/ // Test hadoopFileSystemArchive.getFile(fileThatCouldBeOverwritten, fileSystemPath); } catch (Exception e) { // Intentionally ignoring. } // Confirm TUtilsTestNG.assertFileContentsEqual(originalFile, fileThatCouldBeOverwritten); }
From source file:com.splunk.shuttl.archiver.filesystem.HadoopFileSystemArchiveTest.java
License:Apache License
public void putFile_validInput_fileShouldBePutToFilesSystem() throws IOException { File testFile = TUtilsFile.createFileWithRandomContent(); Path hadoopPath = TUtilsPath.getSafeDirectory(fileSystem); URI fileSystemPath = hadoopPath.toUri(); // Test// w w w.j a va2s . com hadoopFileSystemArchive.putFile(testFile, fileSystemPath); // Confirm File retrivedFile = TUtilsFileSystem.getFileFromFileSystem(fileSystem, hadoopPath); TUtilsTestNG.assertFileContentsEqual(testFile, retrivedFile); }
From source file:com.splunk.shuttl.archiver.filesystem.HadoopFileSystemArchiveTest.java
License:Apache License
@Test(expectedExceptions = FileNotFoundException.class) public void putFile_whenLocalFileDoNotExist_fileNotFoundException() throws IOException { File testFile = TUtilsFile.createFilePath(); Path hadoopPath = TUtilsPath.getSafeDirectory(fileSystem); URI fileSystemPath = hadoopPath.toUri(); // Test// w ww . j a v a 2 s . c o m hadoopFileSystemArchive.putFile(testFile, fileSystemPath); }
From source file:com.splunk.shuttl.archiver.filesystem.HadoopFileSystemArchiveTest.java
License:Apache License
@Test(expectedExceptions = FileOverwriteException.class) public void putFile_whenRemoteFileExists_fileOverwriteException() throws IOException { File fileThatWouldBeOwerwriten = TUtilsFile.createFileWithRandomContent(); hadoopFileSystemPutter.putFile(fileThatWouldBeOwerwriten); Path hadoopPath = hadoopFileSystemPutter.getPathForFile(fileThatWouldBeOwerwriten); URI pathToRemoteFile = hadoopPath.toUri(); File testFile = TUtilsFile.createFileWithRandomContent(); // Test//from ww w . j av a2s . c o m hadoopFileSystemArchive.putFile(testFile, pathToRemoteFile); }
From source file:com.splunk.shuttl.archiver.filesystem.HadoopFileSystemArchiveTest.java
License:Apache License
public void putFile_whenRemoteFileExists_remoteFileShouldNotBeOverwriten() throws IOException { File fileThatWouldBeOwerwriten = TUtilsFile.createFileWithRandomContent(); hadoopFileSystemPutter.putFile(fileThatWouldBeOwerwriten); Path hadoopPath = hadoopFileSystemPutter.getPathForFile(fileThatWouldBeOwerwriten); URI pathToRemoteFile = hadoopPath.toUri(); File testFile = TUtilsFile.createFileWithRandomContent(); boolean didGetExeption = false; try {/*from www .j av a 2 s.c o m*/ // Test hadoopFileSystemArchive.putFile(testFile, pathToRemoteFile); } catch (FileOverwriteException e) { didGetExeption = true; } // Confirm assertTrue(didGetExeption); File fileAfterPut = TUtilsFile.createFilePath(); hadoopFileSystemArchive.getFile(fileAfterPut, pathToRemoteFile); TUtilsTestNG.assertFileContentsEqual("Put shouln't have overwritten the file.", fileThatWouldBeOwerwriten, fileAfterPut); }
From source file:com.splunk.shuttl.archiver.filesystem.HadoopFileSystemArchiveTest.java
License:Apache License
public void putFile_withDirectoryContainingAnotherDirectory_bothDirectoriesExistsInTheArchive() throws URISyntaxException, FileNotFoundException, FileOverwriteException, IOException { File parent = TUtilsFile.createDirectory(); String childFileName = "childDir"; TUtilsFile.createDirectoryInParent(parent, childFileName); Path parentPathOnHadoop = hadoopFileSystemPutter.getPathForFile(parent); hadoopFileSystemArchive.putFile(parent, parentPathOnHadoop.toUri()); assertTrue(fileSystem.exists(parentPathOnHadoop)); Path childPath = new Path(parentPathOnHadoop, childFileName); assertTrue(fileSystem.exists(childPath)); FileUtils.deleteDirectory(parent);//from w w w .j av a 2s. co m }