List of usage examples for org.apache.hadoop.fs.Path#SEPARATOR_CHAR
char SEPARATOR_CHAR
To view the source code for org.apache.hadoop.fs.Path#SEPARATOR_CHAR, click the source link.
From source file:org.apache.manifoldcf.crawler.connectors.hdfs.HDFSRepositoryConnector.java
License:Apache License
/**
 * Match a sub-path against the beginning of a full path, in a path sense.
 * The sub-path must match a complete leading portion of the full path.
 *
 * @param subPath the candidate prefix path.
 * @param fullPath the complete path to match against.
 * @return the index just past the matched portion of fullPath (stepping over
 *         a following path separator if present), or -1 if there is no match.
 */
protected static int matchSubPath(String subPath, String fullPath) {
    // A prefix longer than the whole path can never match.
    if (subPath.length() > fullPath.length()) {
        return -1;
    }
    if (!fullPath.startsWith(subPath)) {
        return -1;
    }
    int index = subPath.length();
    // Exact match: nothing remains beyond the prefix.
    if (fullPath.length() == index) {
        return index;
    }
    // Skip the separator so the caller lands on the start of the next element.
    if (fullPath.charAt(index) == Path.SEPARATOR_CHAR) {
        index++;
    }
    return index;
}
From source file:org.apache.sentry.hdfs.HMSPaths.java
License:Apache License
/**
 * Split an absolute path into its non-empty element names.
 * Consecutive separators are collapsed (empty elements are skipped).
 *
 * @param path the path to split; leading/trailing whitespace is trimmed.
 * @return the list of path elements, in order.
 * @throws IllegalArgumentException if the trimmed path is not absolute.
 */
@VisibleForTesting
static List<String> getPathElements(String path) {
    String trimmed = path.trim();
    if (trimmed.charAt(0) != Path.SEPARATOR_CHAR) {
        throw new IllegalArgumentException("It must be an absolute path: " + trimmed);
    }
    List<String> elements = new ArrayList<String>(32);
    int start = 0;
    for (int sep = trimmed.indexOf(Path.SEPARATOR_CHAR, start); sep > -1;
            sep = trimmed.indexOf(Path.SEPARATOR_CHAR, start)) {
        // Only emit non-empty segments (skips runs of separators).
        if (sep > start) {
            elements.add(trimmed.substring(start, sep));
        }
        start = sep + 1;
    }
    // Trailing segment after the last separator, if any.
    if (start < trimmed.length()) {
        elements.add(trimmed.substring(start));
    }
    return elements;
}
From source file:org.apache.sentry.hdfs.HMSPaths.java
License:Apache License
/**
 * Assemble each list of path elements back into an absolute path string,
 * prefixing every element with the path separator.
 *
 * @param pathElements lists of path elements; may be null.
 * @return one assembled path per element list, or an empty list when
 *         pathElements is null. An empty element list yields an empty string.
 */
static List<String> assemblePaths(List<List<String>> pathElements) {
    if (pathElements == null) {
        return Collections.emptyList();
    }
    List<String> paths = new ArrayList<>(pathElements.size());
    for (List<String> path : pathElements) {
        // StringBuilder instead of StringBuffer: this accumulator is purely
        // local, so StringBuffer's per-call synchronization is wasted.
        StringBuilder sb = new StringBuilder();
        for (String elem : path) {
            sb.append(Path.SEPARATOR_CHAR).append(elem);
        }
        paths.add(sb.toString());
    }
    return paths;
}
From source file:org.apache.sentry.hdfs.SentryAuthorizationInfo.java
License:Apache License
private void setPrefixPaths(String[] pathPrefixes) { this.pathPrefixes = new String[pathPrefixes.length][]; for (int i = 0; i < this.pathPrefixes.length; i++) { Preconditions.checkArgument(pathPrefixes[i].startsWith("" + Path.SEPARATOR_CHAR), "Path prefix [" + pathPrefixes[i] + "]" + "does not starting with [" + Path.SEPARATOR_CHAR + "]"); this.pathPrefixes[i] = pathPrefixes[i].substring(1).split("" + Path.SEPARATOR_CHAR); }//from w w w . ja v a2 s .c o m }
From source file:org.apache.storm.hdfs.spout.TestHdfsSemantics.java
License:Apache License
@Test public void testDeleteSemantics() throws Exception { Path file = new Path(dir.toString() + Path.SEPARATOR_CHAR + "file1"); // try {/*from www. j ava2 s. co m*/ // 1) Delete absent file - should return false Assert.assertFalse(fs.exists(file)); try { Assert.assertFalse(fs.delete(file, false)); } catch (IOException e) { e.printStackTrace(); } // 2) deleting open file - should return true fs.create(file, false); Assert.assertTrue(fs.delete(file, false)); // 3) deleting closed file - should return true FSDataOutputStream os = fs.create(file, false); os.close(); Assert.assertTrue(fs.exists(file)); Assert.assertTrue(fs.delete(file, false)); Assert.assertFalse(fs.exists(file)); }
From source file:org.apache.storm.hdfs.spout.TestHdfsSemantics.java
License:Apache License
@Test public void testConcurrentDeletion() throws Exception { Path file = new Path(dir.toString() + Path.SEPARATOR_CHAR + "file1"); fs.create(file).close();/*from w w w . ja va 2 s.c om*/ // 1 concurrent deletion - only one thread should succeed FileDeletionThread[] thds = startThreads(10, file); int successCount = 0; for (FileDeletionThread thd : thds) { thd.join(); if (thd.succeeded) successCount++; if (thd.exception != null) Assert.assertNotNull(thd.exception); } System.err.println(successCount); Assert.assertEquals(1, successCount); }
From source file:org.apache.storm.hdfs.spout.TestHdfsSemantics.java
License:Apache License
@Test public void testAppendSemantics() throws Exception { //1 try to append to an open file Path file1 = new Path(dir.toString() + Path.SEPARATOR_CHAR + "file1"); FSDataOutputStream os1 = fs.create(file1, false); try {/*from ww w. j a v a 2 s .c o m*/ fs.append(file1); // should fail Assert.assertTrue("Append did not throw an exception", false); } catch (RemoteException e) { // expecting AlreadyBeingCreatedException inside RemoteException Assert.assertEquals(AlreadyBeingCreatedException.class, e.unwrapRemoteException().getClass()); } //2 try to append to a closed file os1.close(); FSDataOutputStream os2 = fs.append(file1); // should pass os2.close(); }
From source file:org.apache.storm.hdfs.spout.TestHdfsSemantics.java
License:Apache License
@Test public void testDoubleCreateSemantics() throws Exception { //1 create an already existing open file w/o override flag Path file1 = new Path(dir.toString() + Path.SEPARATOR_CHAR + "file1"); FSDataOutputStream os1 = fs.create(file1, false); try {//from w w w . j a v a2 s . co m fs.create(file1, false); // should fail Assert.assertTrue("Create did not throw an exception", false); } catch (RemoteException e) { Assert.assertEquals(AlreadyBeingCreatedException.class, e.unwrapRemoteException().getClass()); } //2 close file and retry creation os1.close(); try { fs.create(file1, false); // should still fail } catch (FileAlreadyExistsException e) { // expecting this exception } //3 delete file and retry creation fs.delete(file1, false); FSDataOutputStream os2 = fs.create(file1, false); // should pass Assert.assertNotNull(os2); os2.close(); }
From source file:org.araqne.storage.hdfs.HDFSFilePath.java
License:Apache License
/**
 * Returns the path separator character used by HDFS paths.
 *
 * NOTE(review): "Seperator" is a spelling inherited from the overridden
 * interface method; it cannot be corrected here without breaking the
 * interface contract.
 */
@Override public char getSeperatorChar() { return Path.SEPARATOR_CHAR; }
From source file:org.icgc.dcc.submission.fs.DccFileSystem.java
License:Open Source License
/**
 * Joins the given path parts with the HDFS path separator character.
 *
 * @param parts path components to join, in order.
 * @return the components concatenated with the separator between them.
 */
private String concatPath(String... parts) {
    Joiner separatorJoiner = Joiner.on(Path.SEPARATOR_CHAR);
    return separatorJoiner.join(parts);
}