Example usage for org.apache.hadoop.fs FileSystem open

List of usage examples for org.apache.hadoop.fs FileSystem open

Introduction

In this page you can find the example usage for org.apache.hadoop.fs FileSystem open.

Prototype

public FSDataInputStream open(Path f) throws IOException 

Source Link

Document

Open an FSDataInputStream matching the PathHandle instance.

Usage

From source file:azkaban.common.web.TextFileViewer.java

License:Apache License

/**
 * Streams a line range of an uncompressed HDFS text file to the given output stream.
 *
 * @param path         HDFS path of the text file to display
 * @param outputStream destination stream; flushed but not closed by this method
 * @param startLine    first line to display (1-based, inclusive)
 * @param endLine      line to stop at (exclusive)
 * @throws IOException if the file cannot be opened or read
 */
public void displayFile(FileSystem fs, Path path, OutputStream outputStream, int startLine, int endLine)
        throws IOException {

    if (logger.isDebugEnabled())
        logger.debug("read in uncompressed text file");

    // Only display the first 1M chars; prevents showing/downloading GBs of data.
    final int bufferLimit = 1000000;

    InputStream inputStream = fs.open(path);
    BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream));
    try {
        PrintWriter output = new PrintWriter(outputStream);
        // Skip lines before startLine (line numbers are 1-based).
        for (int i = 1; i < startLine; i++) {
            reader.readLine();
        }

        int bufferSize = 0;
        for (int i = startLine; i < endLine; i++) {
            String line = reader.readLine();
            if (line == null) {
                break;
            }

            // Break if we reach the buffer limit.
            bufferSize += line.length();
            if (bufferSize >= bufferLimit) {
                break;
            }

            output.write(line);
            output.write("\n");
        }
        output.flush();
    } finally {
        // Original leaked the reader/stream on any read error; close unconditionally.
        // Closing the reader also closes the underlying HDFS stream.
        reader.close();
    }
}

From source file:azkaban.crypto.Decryptions.java

License:Open Source License

/**
 * Decrypts the given ciphered text using a passphrase stored in a file on HDFS.
 * The passphrase file must exist, be a regular file, and be readable only by
 * the owning user; its first line is used as the passphrase.
 *
 * @param cipheredText   the encrypted payload to decrypt
 * @param passphrasePath HDFS path to the passphrase file
 * @param fs             file system used to resolve and read the passphrase file
 * @return the decrypted plain text
 * @throws IOException if the passphrase file cannot be read
 */
public String decrypt(final String cipheredText, final String passphrasePath, final FileSystem fs)
        throws IOException {
    Preconditions.checkNotNull(cipheredText);
    Preconditions.checkNotNull(passphrasePath);

    final Path passphraseFile = new Path(passphrasePath);
    Preconditions.checkArgument(fs.exists(passphraseFile), "File does not exist at " + passphrasePath);
    Preconditions.checkArgument(fs.isFile(passphraseFile),
            "Passphrase path is not a file. " + passphrasePath);

    // Enforce user-only read permission so the passphrase isn't world-readable.
    final FileStatus status = fs.getFileStatus(passphraseFile);
    Preconditions.checkArgument(USER_READ_PERMISSION_ONLY.equals(status.getPermission()),
            "Passphrase file should only have read only permission on only user. " + passphrasePath);

    try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(fs.open(passphraseFile), Charset.defaultCharset()))) {
        // The first line of the file is the passphrase.
        final String passphrase = reader.readLine();
        final String plainText = new Crypto().decrypt(cipheredText, passphrase);
        Preconditions.checkNotNull(plainText, "Was not able to decrypt");
        return plainText;
    }
}

From source file:azkaban.jobtype.javautils.Whitelist.java

License:Open Source License

/**
 * Reads the whitelist file at {@code path} and returns its entries as a set,
 * one entry per line, decoded as UTF-8.
 *
 * NOTE(review): reading stops at the first empty (or null) line because of the
 * {@code StringUtils.isEmpty} loop condition — entries after a blank line are
 * silently ignored. Presumably intentional (file ends with a blank line);
 * confirm against the file format.
 *
 * NOTE(review): no locking is performed in this method itself, despite the
 * original comment mentioning a write lock — presumably the caller holds it.
 */
@VisibleForTesting
Set<String> retrieveWhitelist(FileSystem fs, Path path) {
    try {
        Preconditions.checkArgument(fs.exists(path), "File does not exist at " + path);
        Preconditions.checkArgument(fs.isFile(path), "Whitelist path is not a file. " + path);

        Set<String> result = Sets.newHashSet();
        try (BufferedReader br = new BufferedReader(
                new InputStreamReader(fs.open(path), StandardCharsets.UTF_8))) {
            String s = null;
            // Stops at the first empty line or EOF (readLine() returns null at EOF).
            while (!StringUtils.isEmpty((s = br.readLine()))) {
                result.add(s);
            }
        }
        return result;
    } catch (IOException e) {
        // Callers treat a missing/unreadable whitelist as fatal.
        throw new RuntimeException(e);
    }
}

From source file:azkaban.jobtype.StatsUtils.java

License:Apache License

/**
 * Loads the job configuration file of a running job from HDFS and converts it
 * to {@link Properties}.
 *
 * @param runningJob the job whose configuration file should be loaded
 * @return the job configuration as properties, or {@code null} if the conf
 *         file is missing or cannot be read
 */
public static Properties getJobConf(RunningJob runningJob) {
    try {
        Path path = new Path(runningJob.getJobFile());
        Configuration conf = new Configuration(false);
        FileSystem fs = FileSystem.get(new Configuration());
        InputStream in = fs.open(path);
        try {
            conf.addResource(in);
            // getJobConf(conf) reads the properties, which forces the lazily-added
            // resource stream to be consumed before we close it below.
            return getJobConf(conf);
        } finally {
            // Original leaked this stream; close it once the conf has been read.
            in.close();
        }
    } catch (FileNotFoundException e) {
        logger.warn("Job conf not found.");
    } catch (IOException e) {
        logger.warn("Error while retrieving job conf: " + e.getMessage());
    }
    return null;
}

From source file:azkaban.viewer.hdfs.AvroFileViewer.java

License:Apache License

/**
 * Opens the HDFS file at {@code path} as an Avro data stream.
 *
 * @return a stream over the file's Avro records; the caller owns and must close it
 * @throws IOException if the file cannot be opened or is not a valid Avro container
 */
private DataFileStream<Object> getAvroDataStream(FileSystem fs, Path path) throws IOException {
    if (logger.isDebugEnabled()) {
        logger.debug("path:" + path.toUri().getPath());
    }

    GenericDatumReader<Object> avroReader = new GenericDatumReader<Object>();
    // If open() itself throws there is no stream to clean up — the original's
    // null-check-and-close in a catch around open() was dead code.
    InputStream hdfsInputStream = fs.open(path);
    try {
        return new DataFileStream<Object>(hdfsInputStream, avroReader);
    } catch (IOException e) {
        // Avoid leaking the HDFS stream when the Avro header cannot be parsed.
        hdfsInputStream.close();
        throw e;
    }
}

From source file:azkaban.viewer.hdfs.HdfsAvroFileViewer.java

License:Apache License

/**
 * Opens the HDFS file at {@code path} as an Avro data stream.
 *
 * @return a stream over the file's Avro records; the caller owns and must close it
 * @throws IOException if the file cannot be opened or is not a valid Avro container
 */
private DataFileStream<Object> getAvroDataStream(FileSystem fs, Path path) throws IOException {
    if (logger.isDebugEnabled())
        logger.debug("path:" + path.toUri().getPath());

    GenericDatumReader<Object> avroReader = new GenericDatumReader<Object>();
    // BUG FIX: the original caught Exception around open() and swallowed it,
    // then passed the still-null stream to DataFileStream, causing an NPE that
    // masked the real failure. Let the IOException from open() propagate.
    InputStream hdfsInputStream = fs.open(path);
    try {
        return new DataFileStream<Object>(hdfsInputStream, avroReader);
    } catch (IOException e) {
        // Avoid leaking the HDFS stream when the Avro header cannot be parsed.
        hdfsInputStream.close();
        throw e;
    }
}

From source file:azkaban.viewer.hdfs.HdfsImageFileViewer.java

License:Apache License

/**
 * Copies an image file from HDFS to the given output stream, stopping once
 * slightly more than MAX_IMAGE_FILE_SIZE bytes have been written.
 * The startLine/endLine parameters are part of the viewer interface but are
 * not meaningful for binary image data and are ignored.
 *
 * @param outputStream destination stream; flushed but not closed by this method
 * @throws IOException if the file cannot be opened or read
 */
public void displayFile(FileSystem fs, Path path, OutputStream outputStream, int startLine, int endLine)
        throws IOException {

    if (logger.isDebugEnabled()) {
        logger.debug("read in image file");
    }

    InputStream in = null;
    try {
        in = new BufferedInputStream(fs.open(path));
        BufferedOutputStream out = new BufferedOutputStream(outputStream);
        final byte[] chunk = new byte[16384];
        long written = 0L;
        // Copy chunk by chunk; bail out once the size cap has been exceeded.
        for (int n = in.read(chunk); n != -1; n = in.read(chunk)) {
            out.write(chunk, 0, n);
            written += n;
            if (written > MAX_IMAGE_FILE_SIZE) {
                break;
            }
        }
        out.flush();
    } finally {
        if (in != null) {
            in.close();
        }
    }
}

From source file:azkaban.viewer.hdfs.ImageFileViewer.java

License:Apache License

/**
 * Streams an image file from HDFS to the given output stream, capping the
 * output at roughly MAX_IMAGE_FILE_SIZE bytes. The startLine/endLine
 * parameters exist to satisfy the viewer interface and are ignored for
 * binary image content.
 *
 * @param outputStream destination stream; flushed but not closed by this method
 * @throws IOException if the file cannot be opened or read
 */
public void displayFile(FileSystem fs, Path path, OutputStream outputStream, int startLine, int endLine)
        throws IOException {

    if (logger.isDebugEnabled()) {
        logger.debug("read in image file");
    }

    InputStream src = null;
    try {
        src = new BufferedInputStream(fs.open(path));
        BufferedOutputStream sink = new BufferedOutputStream(outputStream);
        final byte[] buf = new byte[16384];
        long total = 0L;
        // Keep copying until EOF or until the previous chunk pushed us past the cap.
        while (total <= MAX_IMAGE_FILE_SIZE) {
            int count = src.read(buf);
            if (count == -1) {
                break;
            }
            sink.write(buf, 0, count);
            total += count;
        }
        sink.flush();
    } finally {
        if (src != null) {
            src.close();
        }
    }
}

From source file:azkaban.viewer.hdfs.TextFileViewer.java

License:Apache License

/**
 * Streams a line range of an uncompressed HDFS text file to the given output
 * stream, capping output at one million characters.
 *
 * @param outputStream destination stream; flushed but not closed by this method
 * @param startLine    first line to display (1-based, inclusive)
 * @param endLine      line to stop at (exclusive)
 * @throws IOException if the file cannot be opened or read
 */
public void displayFile(FileSystem fs, Path path, OutputStream outputStream, int startLine, int endLine)
        throws IOException {

    if (logger.isDebugEnabled())
        logger.debug("read in uncompressed text file");

    // Only display the first 1M chars; prevents showing/downloading GBs of data.
    final int bufferLimit = 1000000;

    InputStream rawStream = null;
    BufferedReader lineReader = null;
    try {
        rawStream = fs.open(path);
        lineReader = new BufferedReader(new InputStreamReader(rawStream));
        PrintWriter writer = new PrintWriter(outputStream);

        // Skip lines before startLine (line numbers are 1-based).
        int skip = startLine - 1;
        while (skip-- > 0) {
            lineReader.readLine();
        }

        int charsEmitted = 0;
        for (int lineNo = startLine; lineNo < endLine; lineNo++) {
            String text = lineReader.readLine();
            if (text == null) {
                break;
            }

            // Stop once the character cap is reached.
            charsEmitted += text.length();
            if (charsEmitted >= bufferLimit) {
                break;
            }

            writer.write(text);
            writer.write("\n");
        }
        writer.flush();
    } finally {
        if (lineReader != null) {
            lineReader.close();
        }
        if (rawStream != null) {
            rawStream.close();
        }
    }
}

From source file:azkaban.webapp.servlet.hdfsviewer.TextFileViewer.java

License:Apache License

/**
 * Writes lines [startLine, endLine) of an uncompressed HDFS text file to the
 * given output stream, stopping early if one million characters have been
 * emitted.
 *
 * @param outputStream destination stream; flushed but not closed by this method
 * @param startLine    first line to display (1-based, inclusive)
 * @param endLine      line to stop at (exclusive)
 * @throws IOException if the file cannot be opened or read
 */
public void displayFile(FileSystem fs, Path path, OutputStream outputStream, int startLine, int endLine)
        throws IOException {

    if (logger.isDebugEnabled())
        logger.debug("read in uncompressed text file");

    InputStream hdfsStream = null;
    BufferedReader textReader = null;
    try {
        hdfsStream = fs.open(path);
        textReader = new BufferedReader(new InputStreamReader(hdfsStream));
        PrintWriter out = new PrintWriter(outputStream);

        // Advance to startLine (line numbers are 1-based).
        for (int skipped = 1; skipped < startLine; skipped++) {
            textReader.readLine();
        }

        // Only display the first 1M chars; prevents showing/downloading GBs of data.
        final int charLimit = 1000000;
        int charsSoFar = 0;
        int current = startLine;
        while (current < endLine) {
            String row = textReader.readLine();
            if (row == null) {
                break;
            }

            // Stop once the character cap is reached.
            charsSoFar += row.length();
            if (charsSoFar >= charLimit) {
                break;
            }

            out.write(row);
            out.write("\n");
            current++;
        }
        out.flush();
    } finally {
        if (textReader != null) {
            textReader.close();
        }
        if (hdfsStream != null) {
            hdfsStream.close();
        }
    }
}