Example usage for org.apache.hadoop.fs FileSystem isFile

Introduction

On this page you can find example usage for org.apache.hadoop.fs.FileSystem.isFile.

Prototype

@Deprecated
public boolean isFile(Path f) throws IOException 

Document

True iff the named path is a regular file. The method is deprecated; getFileStatus(Path) combined with FileStatus.isFile() is the recommended replacement.
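
Because isFile(Path) is deprecated, the short sketch below shows the non-deprecated equivalent built on getFileStatus(Path) and FileStatus.isFile(); the IsFileExample class and isRegularFile helper are illustrative names, not part of the Hadoop API.

import java.io.FileNotFoundException;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class IsFileExample {

    /** Returns true iff the path exists and is a regular file (non-deprecated variant of fs.isFile). */
    static boolean isRegularFile(FileSystem fs, Path p) throws IOException {
        try {
            return fs.getFileStatus(p).isFile();
        } catch (FileNotFoundException e) {
            // fs.isFile(Path) returns false for a missing path instead of throwing
            return false;
        }
    }

    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        Path path = new Path(args[0]);
        FileSystem fs = path.getFileSystem(conf);
        System.out.println(path + " is a regular file: " + isRegularFile(fs, path));
    }
}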

Usage

From source file: edu.ucsb.cs.lsh.projection.SignaturesGenerator.java

License: Apache License

public static void prepareDistributedCache(JobConf job, FileSystem fs, Path path)
        throws URISyntaxException, IOException {
    FileStatus[] files = fs.listStatus(path);
    System.out.println("path to read from is: " + path.getName()); // remove
    for (FileStatus file : files)
        if (fs.isFile(file.getPath()) && !file.getPath().getName().startsWith("_"))
            DistributedCache.addCacheFile(file.getPath().toUri(), job);
}

From source file: edu.ucsb.cs.partitioning.cosine.Partitioner.java

License: Apache License

public static FileStatus[] setFiles(FileSystem hdfs, Path inputPath) throws IOException {
    // If the input is a single file, list the contents of its parent directory; otherwise list the directory itself.
    if (hdfs.isFile(inputPath))
        return hdfs.listStatus(inputPath.getParent());
    else
        return hdfs.listStatus(inputPath);
}

From source file: edu.ucsb.cs.partitioning.statistics.Collector.java

License: Apache License

public static FileStatus[] getFiles(Path inputPath, FileSystem fs) throws IOException {

    FileStatus[] files = null;
    if (fs.exists(inputPath)) {
        if (fs.isFile(inputPath)) {
            files = new FileStatus[1];
            files[0] = new FileStatus(0, false, 1, 1, 1, inputPath);
        } else
            files = fs.listStatus(inputPath);
    } else
        throw new FileNotFoundException("Error: " + inputPath.getName() + " does not exist.");
    return files;
}

From source file: edu.ucsb.cs.partitioning.statistics.CollectorBaraglia.java

License: Apache License

public static FileStatus[] getFiles(Path inputPath, FileSystem fs) throws IOException {

    FileStatus[] files = null;
    if (fs.exists(inputPath)) {
        if (fs.isFile(inputPath)) {
            files = new FileStatus[1];
            files[0] = new FileStatus(0, false, 1, 1, 1, inputPath);
        } else
            files = fs.listStatus(inputPath);
    }
    return files;
}

From source file: edu.ucsb.cs.partitioning.statistics.rsd.java

License: Apache License

public static void main(String[] args) throws IOException {
    if (args.length != 2) {
        System.out.println("Usage:<input directory of (longWritable,FeatureWeightArrayWritable)> <p-norm>");
        return;
    }
    Configuration conf = new Configuration();
    Path inputPath = new Path(args[0]);
    FileSystem hdfs = inputPath.getFileSystem(conf);
    int lineCount = 0, pnorm = Integer.parseInt(args[1]);
    ArrayList<Float> pnorms = new ArrayList<Float>();
    Reader reader = null;

    if (!hdfs.exists(inputPath) || hdfs.isFile(inputPath)) {
        System.out.println("\n Input doesn't exist or is not a directory!");
        return;
    }

    FileStatus[] files = setFiles(hdfs, inputPath);
    for (int i = 0; i < files.length; i++) {
        inputPath = files[i].getPath();
        if (hdfs.isDirectory(inputPath) || inputPath.getName().startsWith("_"))
            continue;
        System.out.println("Reading file " + inputPath.getName()); // remove
        reader = new SequenceFile.Reader(hdfs, inputPath, conf);

        LongWritable key = new LongWritable();
        FeatureWeightArrayWritable value = new FeatureWeightArrayWritable();

        while (reader.next(key, value)) {
            pnorms.add(value.getPNorm(pnorm));
            lineCount++;
        }
        reader.close();
    }
    float pnormrstd = getRStd(pnorms);

    System.out.println(
            "\nInput has " + lineCount + " records.\n" + pnorm + "-Norm %-RSD = " + (pnormrstd * 100));
}

From source file: edu.umn.cs.spatialHadoop.operations.LocalSampler.java

License: Open Source License

/**
 * Read a random sample of up to the given count from the input files.
 * @param files
 * @param ratioOrCount
 * @param output
 * @param conf
 * @return
 * @throws IOException
 * @throws InterruptedException
 */
public static long sampleLocal(Path[] files, float ratioOrCount, ResultCollector<Text> output,
        Configuration conf) throws IOException, InterruptedException {
    Vector<FileSplit> splits = new Vector<FileSplit>();
    for (Path file : files) {
        FileSystem fs = file.getFileSystem(conf);
        if (fs.isFile(file)) {
            // A single file. Include it
            splits.add(new FileSplit(file, 0, fs.getFileStatus(file).getLen(), new String[0]));
        } else {
            // A directory. Include all contents
            FileStatus[] contents = fs.listStatus(file);
            for (FileStatus content : contents) {
                if (!content.isDirectory())
                    splits.add(new FileSplit(content.getPath(), 0, content.getLen(), new String[0]));
            }
        }
    }
    return sampleLocal(splits.toArray(new FileSplit[splits.size()]), ratioOrCount, output, conf);
}

From source file: eu.stratosphere.hadoopcompatibility.FileOutputCommitterWrapper.java

License: Apache License

private void moveTaskOutputs(JobConf conf, TaskAttemptID taskAttemptID, FileSystem fs, Path jobOutputDir,
        Path taskOutput) throws IOException {
    if (fs.isFile(taskOutput)) {
        Path finalOutputPath = getFinalPath(jobOutputDir, taskOutput,
                getTempTaskOutputPath(conf, taskAttemptID));
        if (!fs.rename(taskOutput, finalOutputPath)) {
            if (!fs.delete(finalOutputPath, true)) {
                throw new IOException("Failed to delete earlier output of task: " + taskAttemptID);
            }
            if (!fs.rename(taskOutput, finalOutputPath)) {
                throw new IOException("Failed to save output of task: " + taskAttemptID);
            }
        }
        LOG.debug("Moved " + taskOutput + " to " + finalOutputPath);
    } else if (fs.getFileStatus(taskOutput).isDir()) {
        FileStatus[] paths = fs.listStatus(taskOutput);
        Path finalOutputPath = getFinalPath(jobOutputDir, taskOutput,
                getTempTaskOutputPath(conf, taskAttemptID));
        fs.mkdirs(finalOutputPath);
        if (paths != null) {
            for (FileStatus path : paths) {
                moveTaskOutputs(conf, taskAttemptID, fs, jobOutputDir, path.getPath());
            }
        }
    }
}

From source file: fr.ens.biologie.genomique.eoulsan.util.hadoop.PathUtils.java

License: LGPL

/**
 * Create a new temporary path. Nothing is created on the file system.
 * @param directory parent directory of the temporary file to create
 * @param prefix Prefix of the temporary file
 * @param suffix suffix of the temporary file
 * @param conf the Hadoop Configuration used to access the file system
 * @return the new temporary path
 * @throws IOException if a unique temporary path cannot be found
 */
public static Path createTempPath(final Path directory, final String prefix, final String suffix,
        final Configuration conf) throws IOException {

    final Path myDir;
    final String myPrefix;
    final String mySuffix;

    if (directory == null) {
        throw new NullPointerException("Directory is null");
    }

    if (conf == null) {
        throw new NullPointerException("Configuration is null");
    }

    myDir = directory;

    if (prefix == null) {
        myPrefix = "";
    } else {
        myPrefix = prefix;
    }

    if (suffix == null) {
        mySuffix = "";
    } else {
        mySuffix = suffix;
    }

    final FileSystem fs = directory.getFileSystem(conf);
    Path tempFile;

    final int maxAttempts = 9;
    int attemptCount = 0;
    do {
        attemptCount++;
        if (attemptCount > maxAttempts) {
            throw new IOException("The highly improbable has occurred! Failed to "
                    + "create a unique temporary directory after " + maxAttempts + " attempts.");
        }

        final String filename = myPrefix + UUID.randomUUID().toString() + mySuffix;
        tempFile = new Path(myDir, filename);
    } while (fs.isFile(tempFile));

    return tempFile;
}

From source file: fr.ens.biologie.genomique.eoulsan.util.hadoop.PathUtils.java

License: LGPL

/**
 * Check if a path is an existing regular file.
 * @param file path to test
 * @param conf Configuration
 * @return true if the path exists and is a regular file
 * @throws IOException if an error occurs while accessing the file system
 */
public static final boolean isFile(final Path file, final Configuration conf) throws IOException {

    if (file == null) {
        throw new NullPointerException("The path is null");
    }

    if (conf == null) {
        throw new NullPointerException("The configuration is null");
    }

    final FileSystem fs = file.getFileSystem(conf);

    return fs.isFile(file);
}

From source file: fr.ens.biologie.genomique.eoulsan.util.hadoop.PathUtils.java

License: LGPL

/**
 * Check that a path is an existing standard (regular) file.
 * @param file path to test
 * @param conf Configuration
 * @param msgFileType description of the file type, used in the error message
 * @throws IOException if the file does not exist or is not a standard file
 */
public static final void checkExistingStandardFile(final Path file, final Configuration conf,
        final String msgFileType) throws IOException {

    checkExistingFile(file, conf, msgFileType);

    final FileSystem fs = file.getFileSystem(conf);

    if (!fs.isFile(file)) {
        throw new IOException("The " + msgFileType + " is not a standard file: " + file);
    }
}