Example usage for org.apache.hadoop.io IOUtils closeStream

Introduction

This page collects example usages of org.apache.hadoop.io.IOUtils.closeStream from open source projects.

Prototype

public static void closeStream(java.io.Closeable stream) 

Document

Closes the stream, ignoring Throwable.
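
The common pattern in the examples below is to open the streams, copy bytes, and call closeStream in a finally block (or in an exception handler) so that the streams are released even if the copy fails. A minimal sketch of that pattern, assuming an already-configured FileSystem and placeholder src/dst paths (the CloseStreamSketch class and copy method names are illustrative only):

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class CloseStreamSketch {

    /** Copies src to dst on the given FileSystem, releasing both streams with closeStream. */
    public static void copy(FileSystem fs, Path src, Path dst, Configuration conf) throws IOException {
        InputStream in = null;
        OutputStream out = null;
        try {
            in = fs.open(src);
            out = fs.create(dst);
            IOUtils.copyBytes(in, out, conf, false); // false: do not close the streams here
        } finally {
            // closeStream swallows any Throwable and ignores null arguments,
            // so it is safe to call in cleanup code even if open or create failed.
            IOUtils.closeStream(in);
            IOUtils.closeStream(out);
        }
    }
}

Because closeStream never throws, it avoids masking the original exception when cleanup runs inside a catch or finally block, which is exactly how the copy helpers below use it.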

Usage

From source file:hdfs.FileUtil.java

License:Apache License

/** Copy files between FileSystems. */
public static boolean copy(FileSystem srcFS, Path src, FileSystem dstFS, Path dst, boolean deleteSource,
        boolean overwrite, Configuration conf) throws IOException {
    dst = checkDest(src.getName(), dstFS, dst, overwrite);

    if (srcFS.getFileStatus(src).isDir()) {
        checkDependencies(srcFS, src, dstFS, dst);
        if (!dstFS.mkdirs(dst)) {
            return false;
        }
        FileStatus contents[] = srcFS.listStatus(src);
        for (int i = 0; i < contents.length; i++) {
            copy(srcFS, contents[i].getPath(), dstFS, new Path(dst, contents[i].getPath().getName()),
                    deleteSource, overwrite, conf);
        }
    } else if (srcFS.isFile(src)) {
        InputStream in = null;
        OutputStream out = null;
        try {
            in = srcFS.open(src);
            out = dstFS.create(dst, overwrite);
            IOUtils.copyBytes(in, out, conf, true);
        } catch (IOException e) {
            IOUtils.closeStream(out);
            IOUtils.closeStream(in);
            throw e;
        }
    } else {
        throw new IOException(src.toString() + ": No such file or directory");
    }
    if (deleteSource) {
        return srcFS.delete(src, true);
    } else {
        return true;
    }

}

From source file:hdfs.FileUtil.java

License:Apache License

/** Copy local files to a FileSystem. */
public static boolean copy(File src, FileSystem dstFS, Path dst, boolean deleteSource, Configuration conf)
        throws IOException {
    dst = checkDest(src.getName(), dstFS, dst, false);

    if (src.isDirectory()) {
        if (!dstFS.mkdirs(dst)) {
            return false;
        }
        File contents[] = listFiles(src);
        for (int i = 0; i < contents.length; i++) {
            copy(contents[i], dstFS, new Path(dst, contents[i].getName()), deleteSource, conf);
        }
    } else if (src.isFile()) {
        InputStream in = null;
        OutputStream out = null;
        try {
            in = new FileInputStream(src);
            out = dstFS.create(dst);
            IOUtils.copyBytes(in, out, conf);
        } catch (IOException e) {
            IOUtils.closeStream(out);
            IOUtils.closeStream(in);
            throw e;
        }
    } else {
        throw new IOException(src.toString() + ": No such file or directory");
    }
    if (deleteSource) {
        return FileUtil.fullyDelete(src);
    } else {
        return true;
    }
}

From source file:hydrograph.engine.spark.datasource.utils.FTPUtil.java

License:Apache License

public void upload(RunFileTransferEntity runFileTransferEntity) {
    log.debug("Start FTPUtil upload");

    FTPClient ftpClient = new FTPClient();
    ftpClient.enterLocalPassiveMode();
    ftpClient.setBufferSize(1024000);

    int retryAttempt = runFileTransferEntity.getRetryAttempt();
    int attemptCount = 1;
    int i = 0;

    InputStream inputStream = null;
    boolean login = false;
    File filecheck = new File(runFileTransferEntity.getInputFilePath());
    log.info("input file name" + filecheck.getName());
    if (runFileTransferEntity.getFailOnError()) {
        if (!(filecheck.isFile() || filecheck.isDirectory())
                && !(runFileTransferEntity.getInputFilePath().contains("hdfs://"))) {
            log.error("Invalid input file path. Please provide valid input file path.");
            throw new FTPUtilException("Invalid input file path");
        }
    }

    boolean done = false;
    for (i = 0; i < retryAttempt; i++) {
        try {
            log.info("Connection attempt: " + (i + 1));
            if (runFileTransferEntity.getTimeOut() != 0) {
                if (runFileTransferEntity.getEncoding() != null) {
                    ftpClient.setControlEncoding(runFileTransferEntity.getEncoding());
                }
            }
            ftpClient.setConnectTimeout(runFileTransferEntity.getTimeOut());
            log.debug("connection details: " + "/n" + "Username: " + runFileTransferEntity.getUserName() + "/n"
                    + "HostName " + runFileTransferEntity.getHostName() + "/n" + "Portno"
                    + runFileTransferEntity.getPortNo());
            ftpClient.connect(runFileTransferEntity.getHostName(), runFileTransferEntity.getPortNo());
            login = ftpClient.login(runFileTransferEntity.getUserName(), runFileTransferEntity.getPassword());
            if (!login) {
                log.error("Invalid FTP details provided. Please provide correct FTP details.");
                throw new FTPUtilException("Invalid FTP details");
            }
            ftpClient.enterLocalPassiveMode();
            ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
            if (runFileTransferEntity.getInputFilePath().contains("hdfs://")) {
                log.debug("Processing for HDFS input file path");
                String inputPath = runFileTransferEntity.getInputFilePath();

                String s1 = inputPath.substring(7, inputPath.length());

                String s2 = s1.substring(0, s1.indexOf("/"));

                int index = runFileTransferEntity.getInputFilePath()
                        .replaceAll(Matcher.quoteReplacement("\\"), "/").lastIndexOf('/');

                String file_name = runFileTransferEntity.getInputFilePath().substring(index + 1);

                File f = new File("/tmp");
                if (!f.exists())
                    f.mkdir();
                Configuration conf = new Configuration();
                conf.set("fs.defaultFS", "hdfs://" + s2);
                FileSystem hdfsFileSystem = FileSystem.get(conf);
                Path local = new Path("/tmp");
                String s = inputPath.substring(7, inputPath.length());
                String hdfspath = s.substring(s.indexOf("/"), s.length());
                File dir = new File(hdfspath);
                Random ran = new Random();
                String tempFolder = "ftp_sftp_" + System.nanoTime() + "_" + ran.nextInt(1000);
                File dirs = new File("/tmp/" + tempFolder);
                boolean success = dirs.mkdirs();
                if (hdfsFileSystem.isDirectory(new Path(hdfspath))) {
                    log.debug("Provided HDFS input path is for directory.");
                    InputStream is = null;
                    OutputStream os = null;
                    String localDirectory = hdfspath.substring(hdfspath.lastIndexOf("/") + 1);
                    FileStatus[] fileStatus = hdfsFileSystem
                            .listStatus(new Path(runFileTransferEntity.getInputFilePath()));
                    Path[] paths = FileUtil.stat2Paths(fileStatus);
                    try {
                        String folderName = hdfspath.substring(hdfspath.lastIndexOf("/") + 1);
                        Path hdfs = new Path(hdfspath);
                        for (Path file : paths) {
                            is = hdfsFileSystem.open(file);
                            os = new BufferedOutputStream(
                                    new FileOutputStream(dirs + "" + File.separatorChar + file.getName()));
                            IOUtils.copyBytes(is, os, conf);
                        }
                        ftpClient.changeWorkingDirectory(runFileTransferEntity.getOutFilePath()
                                .replaceAll(Matcher.quoteReplacement("\\"), "/"));
                        ftpClient.removeDirectory(folderName);
                        ftpClient.makeDirectory(folderName);
                        ftpClient.changeWorkingDirectory(runFileTransferEntity.getOutFilePath().replaceAll(
                                Matcher.quoteReplacement("\\"), "/") + File.separatorChar + folderName);
                        for (File files : dirs.listFiles()) {

                            if (files.isFile())
                                ftpClient.storeFile(files.getName().toString(),
                                        new BufferedInputStream(new FileInputStream(files)));

                        }
                    } catch (IOException e) {
                        log.error("Failed while doing FTP file", e);
                        //throw e;
                    } finally {
                        IOUtils.closeStream(is);
                        IOUtils.closeStream(os);
                        if (dirs != null) {
                            FileUtils.deleteDirectory(dirs);
                        }
                    }
                } else {
                    try {
                        Path hdfs = new Path(hdfspath);
                        hdfsFileSystem.copyToLocalFile(false, hdfs, local);
                        // copyToLocalFile above places the file under /tmp with its original name
                        inputStream = new FileInputStream("/tmp/" + file_name);
                        ftpClient.storeFile(file_name, new BufferedInputStream(inputStream));
                    } catch (Exception e) {
                        log.error("Failed while doing FTP file", e);
                        throw new FTPUtilException("Failed while doing FTP file", e);
                    } finally {
                        FileUtils.deleteDirectory(dirs);
                    }
                }
            } else {
                java.nio.file.Path file = new File(runFileTransferEntity.getInputFilePath()).toPath();
                if (Files.isDirectory(file)) {
                    log.debug("Provided input file path is for directory");
                    File dir = new File(runFileTransferEntity.getInputFilePath());
                    String folderName = new File(runFileTransferEntity.getInputFilePath()).getName();
                    ftpClient.changeWorkingDirectory(runFileTransferEntity.getOutFilePath()
                            .replaceAll(Matcher.quoteReplacement("\\"), "/"));
                    try {
                        ftpClient.removeDirectory(folderName);
                    } catch (IOException e) {
                        log.error("Failed while doing FTP file", e);
                        throw new FTPUtilException("Failed while doing FTP file", e);
                    }
                    ftpClient.makeDirectory(folderName);

                    ftpClient.changeWorkingDirectory(runFileTransferEntity.getOutFilePath()
                            .replaceAll(Matcher.quoteReplacement("\\"), "/") + "/" + folderName);
                    for (File files : dir.listFiles()) {

                        if (files.isFile())
                            ftpClient.storeFile(files.getName().toString(),
                                    new BufferedInputStream(new FileInputStream(files)));
                    }
                } else {

                    inputStream = new FileInputStream(runFileTransferEntity.getInputFilePath());
                    ftpClient.changeWorkingDirectory(runFileTransferEntity.getOutFilePath()
                            .replaceAll(Matcher.quoteReplacement("\\"), "/"));
                    int index = runFileTransferEntity.getInputFilePath()
                            .replaceAll(Matcher.quoteReplacement("\\"), "/").lastIndexOf('/');
                    String file_name = runFileTransferEntity.getInputFilePath().substring(index + 1);
                    ftpClient.storeFile(file_name, new BufferedInputStream(inputStream));
                }

            }
        } catch (Exception e) {
            log.error("Failed while doing FTP file", e);
            if (!login && runFileTransferEntity.getFailOnError()) {
                throw new FTPUtilException("Invalid FTP details");
            }
            try {
                Thread.sleep(runFileTransferEntity.getRetryAfterDuration());
            } catch (Exception e1) {
                log.error("Failed while sleeping for retry duration", e1);
            }
            continue;
        } finally {
            try {
                if (inputStream != null)
                    inputStream.close();
            } catch (IOException ioe) {
                // ignore failures while closing the stream
            }
        }
        done = true;
        break;
    }

    try {
        if (ftpClient != null) {
            ftpClient.logout();
            ftpClient.disconnect();

        }
    } catch (Exception e) {
        log.error("Failed while clossing the connection", e);
    } catch (Error e) {
        log.error("Failed while clossing the connection", e);
        //throw new RuntimeException(e);
    }

    if (runFileTransferEntity.getFailOnError() && !done) {
        log.error("File transfer failed");
        throw new FTPUtilException("File transfer failed");
    } else if (!done) {
        log.error("File transfer failed but mentioned fail on error as false");
    }
    log.debug("Finished FTPUtil upload");
}

From source file:hydrograph.engine.spark.datasource.utils.SFTPUtil.java

License:Apache License

public void upload(RunFileTransferEntity runFileTransferEntity) {
    log.debug("Start SFTPUtil upload");
    JSch jsch = new JSch();
    Session session = null;
    Channel channel = null;
    ChannelSftp sftpChannel = null;
    ZipInputStream zip = null;
    FileInputStream fin = null;
    int retryAttempt = 0;
    int i;
    File filecheck = new File(runFileTransferEntity.getInputFilePath());
    if (runFileTransferEntity.getFailOnError())
        if (!(filecheck.isFile() || filecheck.isDirectory())
                && !(runFileTransferEntity.getInputFilePath().contains("hdfs://"))) {
            log.error("invalid input file path,Please provide valid file path");
            throw new SFTPUtilException("Invalid input file path");
        }

    if (runFileTransferEntity.getRetryAttempt() == 0)
        retryAttempt = 1;
    else
        retryAttempt = runFileTransferEntity.getRetryAttempt();

    boolean done = false;
    for (i = 0; i < retryAttempt; i++) {

        try {
            log.info("connection attempt: " + (i + 1));
            if (runFileTransferEntity.getPrivateKeyPath() != null) {
                jsch.addIdentity(runFileTransferEntity.getPrivateKeyPath());
            }
            log.debug("connection details: " + "/n" + "Username: " + runFileTransferEntity.getUserName() + "/n"
                    + "HostName " + runFileTransferEntity.getHostName() + "/n" + "Portno"
                    + runFileTransferEntity.getPortNo());
            session = jsch.getSession(runFileTransferEntity.getUserName(), runFileTransferEntity.getHostName(),
                    runFileTransferEntity.getPortNo());
            session.setConfig("PreferredAuthentications", "publickey,keyboard-interactive,password");
            session.setConfig("StrictHostKeyChecking", "no");
            if (runFileTransferEntity.getPassword() != null) {
                session.setPassword(runFileTransferEntity.getPassword());
            }
            if (runFileTransferEntity.getTimeOut() > 0) {

                session.setTimeout(runFileTransferEntity.getTimeOut());
            }

            session.connect();
            channel = session.openChannel("sftp");
            channel.connect();
            sftpChannel = (ChannelSftp) channel;
            sftpChannel.setFilenameEncoding(runFileTransferEntity.getEncoding());
            sftpChannel
                    .cd(runFileTransferEntity.getOutFilePath().replaceAll(Matcher.quoteReplacement("\\"), "/"));

            if (runFileTransferEntity.getInputFilePath().contains("hdfs://")) {
                log.debug("in hdfs file system transfer");
                String inputPath = runFileTransferEntity.getInputFilePath();
                File f = new File("/tmp");
                if (!f.exists())
                    f.mkdir();
                String s1 = inputPath.substring(7, inputPath.length());
                String s2 = s1.substring(0, s1.indexOf("/"));
                Configuration conf = new Configuration();
                conf.set("fs.defaultFS", "hdfs://" + s2);
                FileSystem hdfsFileSystem = FileSystem.get(conf);
                Path local = new Path("/tmp");
                String s = inputPath.substring(7, inputPath.length());
                String hdfspath = s.substring(s.indexOf("/"), s.length());

                File dir = new File(hdfspath);
                if (hdfsFileSystem.isDirectory(new Path(hdfspath))) {
                    log.debug("in hdfs file system folder path");
                    InputStream is = null;
                    OutputStream os = null;
                    String localDirectory = hdfspath.substring(hdfspath.lastIndexOf("/") + 1);
                    FileStatus[] fileStatus = hdfsFileSystem
                            .listStatus(new Path(runFileTransferEntity.getInputFilePath()));
                    Path[] paths = FileUtil.stat2Paths(fileStatus);
                    File dirs = null;

                    try {
                        String folderName = hdfspath.substring(hdfspath.lastIndexOf("/") + 1);

                        DateFormat df = new SimpleDateFormat("dd-MM-yyyy");
                        String dateWithoutTime = df.format(new Date()).toString();
                        java.util.Random ran = new Random();
                        String tempFolder = "ftp_sftp_" + System.nanoTime() + "_" + ran.nextInt(1000);
                        dirs = new File("/tmp/" + tempFolder);
                        boolean success = dirs.mkdirs();
                        for (Path file : paths) {

                            is = hdfsFileSystem.open(file);
                            os = new BufferedOutputStream(
                                    new FileOutputStream(dirs + "" + File.separatorChar + file.getName()));
                            IOUtils.copyBytes(is, os, conf);
                        }
                        try {

                            sftpChannel.cd(folderName);
                        } catch (SftpException e) {
                            sftpChannel.mkdir(folderName);
                            sftpChannel.cd(folderName);
                        }
                        for (File files : dirs.listFiles()) {
                            if (files.isFile()) {
                                sftpChannel.put(new BufferedInputStream(new FileInputStream(files)),
                                        files.getName());
                            }
                        }
                    } catch (IOException e) {
                        log.error("error while transferring file", e);
                        throw new SFTPUtilException(e);
                    } finally {
                        IOUtils.closeStream(is);
                        IOUtils.closeStream(os);
                        if (dirs != null) {

                            FileUtils.deleteDirectory(dirs);
                        }

                    }

                } else {
                    log.debug("File transfer in normal mode");
                    Path hdfs = new Path(hdfspath);
                    hdfsFileSystem.copyToLocalFile(false, hdfs, local);
                    int index = inputPath.replaceAll(Matcher.quoteReplacement("\\"), "/").lastIndexOf('/');
                    String file_name = runFileTransferEntity.getInputFilePath().substring(index + 1);
                    fin = new FileInputStream("/tmp/" + file_name);
                    sftpChannel.cd(runFileTransferEntity.getOutFilePath()
                            .replaceAll(Matcher.quoteReplacement("\\"), "/"));
                    sftpChannel.put(new BufferedInputStream(fin), file_name);
                    i = i + 1;
                    fin.close();
                }

            } else {
                java.nio.file.Path file = new File(runFileTransferEntity.getInputFilePath()).toPath();
                if (Files.isDirectory(file)) {
                    log.debug("Folder transfer in SFTP");
                    File f = new File(file.toAbsolutePath().toString());
                    String folderName = new File(runFileTransferEntity.getInputFilePath()).getName();
                    sftpChannel.cd(runFileTransferEntity.getOutFilePath()
                            .replaceAll(Matcher.quoteReplacement("\\"), "/"));
                    try {

                        sftpChannel.cd(folderName);
                    } catch (SftpException e) {
                        log.error(
                                "Target folder not found while changing directory, so creating a new directory");
                        sftpChannel.cd(runFileTransferEntity.getOutFilePath()
                                .replaceAll(Matcher.quoteReplacement("\\"), "/"));
                        sftpChannel.mkdir(folderName);
                        sftpChannel.cd(folderName);
                    }

                    for (File files : f.listFiles()) {

                        if (files.isFile())
                            sftpChannel.put(new BufferedInputStream(new FileInputStream(files)),
                                    files.getName());

                    }

                } else {
                    int index = runFileTransferEntity.getInputFilePath()
                            .replaceAll(Matcher.quoteReplacement("\\"), "/").lastIndexOf('/');
                    String file_name = runFileTransferEntity.getInputFilePath().substring(index + 1);
                    fin = new FileInputStream(runFileTransferEntity.getInputFilePath());
                    sftpChannel.cd(runFileTransferEntity.getOutFilePath()
                            .replaceAll(Matcher.quoteReplacement("\\"), "/"));
                    sftpChannel.put(new BufferedInputStream(fin), file_name);
                    fin.close();
                }
            }
        } catch (JSchException e) {
            if (e.getMessage().compareTo("Auth fail") == 0) {
                log.error("authentication error,please provide valid details", e);
                if (runFileTransferEntity.getFailOnError())
                    throw new SFTPUtilException(e.getMessage());
            } else {
                log.error("error while transfering the file and retrying ", e);
                try {
                    Thread.sleep(runFileTransferEntity.getRetryAfterDuration());
                } catch (Exception e1) {
                    log.error("sleep duration for re attemp exception", e1);
                }
                continue;
            }

        } catch (Exception e) {
            log.error("Error while transfering the file", e);
            try {
                Thread.sleep(runFileTransferEntity.getRetryAfterDuration());
            } catch (Exception e1) {
                log.error("exception while sleep thread", e);
            }
            continue;
        } finally {
            try {
                if (fin != null)
                    fin.close();
            } catch (IOException ioe) {
                log.error("error while closing input stream ");
            }
        }

        done = true;
        break;

    }

    if (sftpChannel != null) {
        sftpChannel.disconnect();
    }
    if (channel != null) {
        channel.disconnect();
    }
    if (session != null) {
        session.disconnect();
    }
    if (runFileTransferEntity.getFailOnError() && !done) {
        log.error("File transfer failed");
        throw new SFTPUtilException("File transfer failed");
    } else if (!done) {
        log.error("File transfer failed but mentioned fail on error as false");
    }
    log.debug("Fininished SFTPUtil upload");
}

From source file:io.aos.hdfs.basics.AbstractHdfsFileTest.java

License:Apache License

private void testCreateHdfsFile(Path path) throws IOException {
    getLogger().info("Testing HDFS file creation.");
    byte[] buff = "The HDFS File content".getBytes(Charset.forName("UTF-8"));
    FSDataOutputStream outputStream = getFileSystem().create(path);
    outputStream.write(buff, 0, buff.length);
    outputStream.close();
    FSDataInputStream in = getFileSystem().open(path);
    IOUtils.copyBytes(in, System.out, 4096, false);
    in.seek(0);
    IOUtils.copyBytes(in, System.out, 4096, false);
    IOUtils.closeStream(in);
}

From source file:io.aos.hdfs.basics.AbstractHdfsFileTest.java

License:Apache License

private void test1() throws IOException {
    Path srcPath = new Path("./src/test/resources/log4j.properties");
    Path dstPath = new Path("/log4j.properties");
    getFileSystem().copyFromLocalFile(srcPath, dstPath);
    FSDataInputStream in = getFileSystem().open(new Path("./log4j.properties"));
    IOUtils.copyBytes(in, System.out, 4096, false);
    in.seek(0);
    IOUtils.copyBytes(in, System.out, 4096, false);
    IOUtils.closeStream(in);
    // in was closed above, so reopen it before the final copy.
    in = getFileSystem().open(dstPath);
    OutputStream out = getFileSystem().create(new Path("./log4j.properties"), new Progressable() {
        @Override
        public void progress() {
            System.out.println(".");
        }
    });
    IOUtils.copyBytes(in, out, 4096, true);
}

From source file:io.aos.hdfs.FileDecompressor.java

License:Apache License

public static void main(String... args) throws Exception {
    String uri = args[0];
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(uri), conf);

    Path inputPath = new Path(uri);
    CompressionCodecFactory factory = new CompressionCodecFactory(conf);
    CompressionCodec codec = factory.getCodec(inputPath);
    if (codec == null) {
        System.err.println("No codec found for " + uri);
        System.exit(1);
    }

    String outputUri = CompressionCodecFactory.removeSuffix(uri, codec.getDefaultExtension());

    InputStream in = null;
    OutputStream out = null;
    try {
        in = codec.createInputStream(fs.open(inputPath));
        out = fs.create(new Path(outputUri));
        IOUtils.copyBytes(in, out, conf);
    } finally {
        IOUtils.closeStream(in);
        IOUtils.closeStream(out);
    }
}

From source file:io.aos.hdfs.FileSystemDoubleCat.java

License:Apache License

public static void main(String... args) throws Exception {
    String uri = args[0];
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(uri), conf);
    FSDataInputStream in = null;
    try {
        in = fs.open(new Path(uri));
        IOUtils.copyBytes(in, System.out, 4096, false);
        in.seek(0); // go back to the start of the file
        IOUtils.copyBytes(in, System.out, 4096, false);
    } finally {
        IOUtils.closeStream(in);
    }
}

From source file:io.aos.hdfs.MapFileWriteDemo.java

License:Apache License

public static void main(String... args) throws IOException {
    String uri = args[0];
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(uri), conf);

    IntWritable key = new IntWritable();
    Text value = new Text();
    MapFile.Writer writer = null;
    try {
        writer = new MapFile.Writer(conf, fs, uri, key.getClass(), value.getClass());

        for (int i = 0; i < 1024; i++) {
            key.set(i + 1);
            value.set(DATA[i % DATA.length]);
            writer.append(key, value);
        }
    } finally {
        IOUtils.closeStream(writer);
    }
}

From source file:io.aos.hdfs.SequenceFileReadDemo.java

License:Apache License

public static void main(String... args) throws IOException {
    String uri = args[0];
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(uri), conf);
    Path path = new Path(uri);

    SequenceFile.Reader reader = null;
    try {
        reader = new SequenceFile.Reader(fs, path, conf);
        Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
        Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
        long position = reader.getPosition();
        while (reader.next(key, value)) {
            String syncSeen = reader.syncSeen() ? "*" : "";
            System.out.printf("[%s%s]\t%s\t%s\n", position, syncSeen, key, value);
            position = reader.getPosition(); // beginning of next record
        }
    } finally {
        IOUtils.closeStream(reader);
    }
}