List of usage examples for org.apache.hadoop.fs.FileSystem#makeQualified(Path)
public Path makeQualified(Path path)
From source file:ml.shifu.guagua.yarn.util.YarnUtils.java
License:Apache License
/**
 * Returns the fully-qualified application working directory used to stage
 * jars, files and other resources for the given YARN application.
 *
 * @param fs    file system the path is qualified against
 * @param appId application whose staging directory is resolved
 * @return qualified path {@code /<GUAGUA_YARN_TMP>/<GUAGUA_HDFS_DIR>/<appId>}
 */
public static Path getAppDirectory(FileSystem fs, ApplicationId appId) {
    // Root staging area shared by all guagua applications on this file system.
    Path stagingRoot = new Path(File.separator + GUAGUA_YARN_TMP, GuaguaYarnConstants.GUAGUA_HDFS_DIR);
    return fs.makeQualified(new Path(stagingRoot, appId.toString()));
}
From source file:ml.shifu.guagua.yarn.util.YarnUtils.java
License:Apache License
/**
 * Copies a local file to the given remote location on the configured file
 * system, overwriting any existing remote file and keeping the local copy.
 *
 * @param conf   Hadoop configuration used to resolve the file system
 * @param local  local source file path
 * @param remote remote destination path (qualified against the file system)
 * @throws IOException if the copy fails
 */
private static void copyToFs(Configuration conf, String local, String remote) throws IOException {
    FileSystem fileSystem = FileSystem.get(conf);
    Path source = new Path(local);
    Path destination = fileSystem.makeQualified(new Path(remote));
    // delSrc = false: keep the local file; overwrite = true: replace remote.
    fileSystem.copyFromLocalFile(false, true, source, destination);
}
From source file:ml.shifu.shifu.udf.EncodeDataUDF.java
License:Apache License
public EncodeDataUDF(String source, String pathModelConfig, String pathColumnConfig, String evalSetName) throws IOException { super(source, pathModelConfig, pathColumnConfig, evalSetName); this.pathFinder = new PathFinder(this.modelConfig); // get model path SourceType sourceType = SourceType.valueOf(source); FileSystem fileSystem = ShifuFileUtils.getFileSystemBySourceType(sourceType); Path modelPath = fileSystem .makeQualified(new Path(this.pathFinder.getModelsPath(sourceType), getModelName(0))); // load Tree model InputStream inputStream = null; try {/*from www . j av a 2s . c o m*/ inputStream = ShifuFileUtils.getInputStream(modelPath, sourceType); this.treeModel = IndependentTreeModel.loadFromStream(inputStream); } finally { IOUtils.closeQuietly(inputStream); } if (StringUtils.isNotBlank(evalSetName)) { for (EvalConfig evalConfig : this.modelConfig.getEvals()) { if (evalConfig.getName().equals(evalSetName)) { this.evalConfig = evalConfig; break; } } } }
From source file:org.apache.accumulo.core.client.admin.TableOperationsImpl.java
License:Apache License
@SuppressWarnings("deprecation") private Path checkPath(String dir, String kind, String type) throws IOException, AccumuloException { Path ret;//from w w w . j a v a 2 s . c o m FileSystem fs = FileUtil.getFileSystem(dir, CachedConfiguration.getInstance(), ServerConfigurationUtil.getConfiguration(instance)); if (dir.contains(":")) { ret = new Path(dir); } else { ret = fs.makeQualified(new Path(dir)); } if (!fs.exists(ret)) throw new AccumuloException(kind + " import " + type + " directory " + dir + " does not exist!"); if (!fs.getFileStatus(ret).isDir()) { throw new AccumuloException(kind + " import " + type + " directory " + dir + " is not a directory!"); } if (type.equals("failure")) { FileStatus[] listStatus = fs.listStatus(ret); if (listStatus != null && listStatus.length != 0) { throw new AccumuloException("Bulk import failure directory " + ret + " is not empty"); } } return ret; }
From source file:org.apache.accumulo.core.client.impl.BulkImport.java
License:Apache License
/**
 * Checks that the bulk import directory exists, is a directory, and is
 * writable. Writability is probed by creating and then removing a marker
 * file inside the directory.
 *
 * @param fs  file system the path is resolved on
 * @param dir directory path; qualified unless it already names a scheme
 * @return the resolved path
 * @throws AccumuloException if the directory is missing, not a directory,
 *                           not writable, or has bad permissions
 */
private Path checkPath(FileSystem fs, String dir) throws IOException, AccumuloException {
    Path ret = dir.contains(":") ? new Path(dir) : fs.makeQualified(new Path(dir));
    try {
        if (!fs.getFileStatus(ret).isDirectory()) {
            throw new AccumuloException("Bulk import directory " + dir + " is not a directory!");
        }
        // Probe writability with a throw-away marker file.
        Path probe = new Path(ret, "isWritable");
        if (fs.createNewFile(probe)) {
            fs.delete(probe, true);
        } else {
            throw new AccumuloException("Bulk import directory " + dir + " is not writable.");
        }
    } catch (FileNotFoundException fnf) {
        throw new AccumuloException("Bulk import directory " + dir + " does not exist or has bad permissions",
                fnf);
    }
    return ret;
}
From source file:org.apache.accumulo.core.client.impl.TableOperationsImpl.java
License:Apache License
/**
 * Resolves and validates an import directory against the volume that owns
 * it: the directory must exist, be a directory, and — when {@code type} is
 * "failure" — be empty.
 *
 * @param dir  directory path; qualified unless it already carries a scheme
 * @param kind import kind, used only in error messages
 * @param type import type; "failure" triggers the emptiness check
 * @return the resolved (possibly qualified) path
 * @throws AccumuloException if any validation fails
 */
private Path checkPath(String dir, String kind, String type)
        throws IOException, AccumuloException, AccumuloSecurityException {
    Path ret;
    Map<String, String> props = context.getConnector().instanceOperations().getSystemConfiguration();
    AccumuloConfiguration conf = new ConfigurationCopy(props);
    FileSystem fs = VolumeConfiguration.getVolume(dir, CachedConfiguration.getInstance(), conf).getFileSystem();
    if (dir.contains(":")) {
        // Path already names a scheme; use it verbatim.
        ret = new Path(dir);
    } else {
        ret = fs.makeQualified(new Path(dir));
    }
    try {
        if (!fs.getFileStatus(ret).isDirectory()) {
            throw new AccumuloException(
                    kind + " import " + type + " directory " + dir + " is not a directory!");
        }
    } catch (FileNotFoundException fnf) {
        // FIX: preserve the cause (the original dropped it); sibling
        // BulkImport.checkPath already chains fnf the same way.
        throw new AccumuloException(kind + " import " + type + " directory " + dir + " does not exist!", fnf);
    }
    if (type.equals("failure")) {
        // Failure directories must start out empty.
        FileStatus[] listStatus = fs.listStatus(ret);
        if (listStatus != null && listStatus.length != 0) {
            throw new AccumuloException("Bulk import failure directory " + ret + " is not empty");
        }
    }
    return ret;
}
From source file:org.apache.accumulo.core.clientImpl.bulk.BulkImport.java
License:Apache License
/** * Check path of bulk directory and permissions *///from w ww .j a v a2 s . com private Path checkPath(FileSystem fs, String dir) throws IOException, AccumuloException { Path ret; if (dir.contains(":")) { ret = new Path(dir); } else { ret = fs.makeQualified(new Path(dir)); } try { if (!fs.getFileStatus(ret).isDirectory()) { throw new AccumuloException("Bulk import directory " + dir + " is not a directory!"); } Path tmpFile = new Path(ret, "isWritable"); if (fs.createNewFile(tmpFile)) fs.delete(tmpFile, true); else throw new AccumuloException("Bulk import directory " + dir + " is not writable."); } catch (FileNotFoundException fnf) { throw new AccumuloException("Bulk import directory " + dir + " does not exist or has bad permissions", fnf); } // TODO ensure dir does not contain bulk load mapping return ret; }
From source file:org.apache.accumulo.core.clientImpl.TableOperationsImpl.java
License:Apache License
/**
 * Resolves and validates an import directory against the volume that owns
 * it: the directory must exist, be a directory, and — when {@code type} is
 * "failure" — be empty.
 *
 * @param dir  directory path; qualified unless it already carries a scheme
 * @param kind import kind, used only in error messages
 * @param type import type; "failure" triggers the emptiness check
 * @return the resolved (possibly qualified) path
 * @throws AccumuloException if any validation fails
 */
private Path checkPath(String dir, String kind, String type)
        throws IOException, AccumuloException, AccumuloSecurityException {
    Path ret;
    Map<String, String> props = context.instanceOperations().getSystemConfiguration();
    AccumuloConfiguration conf = new ConfigurationCopy(props);
    FileSystem fs = VolumeConfiguration.getVolume(dir, context.getHadoopConf(), conf).getFileSystem();
    if (dir.contains(":")) {
        // Path already names a scheme; use it verbatim.
        ret = new Path(dir);
    } else {
        ret = fs.makeQualified(new Path(dir));
    }
    try {
        if (!fs.getFileStatus(ret).isDirectory()) {
            throw new AccumuloException(
                    kind + " import " + type + " directory " + dir + " is not a directory!");
        }
    } catch (FileNotFoundException fnf) {
        // FIX: preserve the cause (the original dropped it); sibling
        // BulkImport.checkPath already chains fnf the same way.
        throw new AccumuloException(kind + " import " + type + " directory " + dir + " does not exist!", fnf);
    }
    if (type.equals("failure")) {
        // Failure directories must start out empty.
        FileStatus[] listStatus = fs.listStatus(ret);
        if (listStatus != null && listStatus.length != 0) {
            throw new AccumuloException("Bulk import failure directory " + ret + " is not empty");
        }
    }
    return ret;
}
From source file:org.apache.accumulo.core.file.map.MapFileUtil.java
License:Apache License
/**
 * Opens a {@link MapFile.Reader} for the map file at {@code dirName},
 * qualifying the path against the supplied file system.
 *
 * @param acuconf Accumulo configuration (unused here; kept for interface
 *                compatibility with callers)
 * @param fs      file system used to qualify the path
 * @param dirName map file directory name
 * @param conf    Hadoop configuration passed to the reader
 * @return an open reader; the caller is responsible for closing it
 * @throws IOException if the reader cannot be opened
 */
public static MapFile.Reader openMapFile(AccumuloConfiguration acuconf, FileSystem fs, String dirName,
        Configuration conf) throws IOException {
    // FIX: removed the no-op catch-and-rethrow of IOException and the dead
    // `mfr = null` initialization; the exception propagates unchanged.
    return new MapFile.Reader(fs.makeQualified(new Path(dirName)), conf);
}
From source file:org.apache.accumulo.core.file.map.MapFileUtil.java
License:Apache License
public static SequenceFile.Reader openIndex(Configuration conf, FileSystem fs, Path mapFile) throws IOException { Path indexPath = new Path(mapFile, MapFile.INDEX_FILE_NAME); SequenceFile.Reader index = null; try {//from w w w.j a va2s . c o m index = new SequenceFile.Reader(conf, SequenceFile.Reader.file(fs.makeQualified(indexPath))); return index; } catch (IOException e) { throw e; } }