Example usage for org.apache.hadoop.fs FileSystem copyFromLocalFile

Introduction

On this page you can find example usages of org.apache.hadoop.fs.FileSystem#copyFromLocalFile.

Prototype

public void copyFromLocalFile(Path src, Path dst) throws IOException 

Document

The src file is on the local disk. Add it to the filesystem at the given dst name; the source is kept intact afterwards.
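
As a minimal, self-contained sketch of the call (the fs.defaultFS value and both paths are placeholders, not taken from the examples below):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyFromLocalExample {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Placeholder NameNode address; adjust for your cluster.
        conf.set("fs.defaultFS", "hdfs://localhost:9000");

        FileSystem fs = FileSystem.get(conf);
        // Copies the local file into HDFS; the local source is kept intact.
        fs.copyFromLocalFile(new Path("/tmp/input.txt"), new Path("/data/input.txt"));
    }
}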

Usage

From source file: fr.jetoile.hadoopunit.integrationtest.SparkIntegrationTest.java

License: Apache License

@Before
public void before() throws IOException, URISyntaxException {
    FileSystem fileSystem = HdfsUtils.INSTANCE.getFileSystem();

    fileSystem.mkdirs(new Path("/khanh/test"));
    fileSystem.mkdirs(new Path("/khanh/test_parquet"));
    fileSystem.copyFromLocalFile(
            new Path(SparkIntegrationTest.class.getClassLoader().getResource("test.csv").toURI()),
            new Path("/khanh/test/test.csv"));

    new HiveSetup(HiveConnectionUtils.INSTANCE.getDestination(), Operations.sequenceOf(CREATE_TABLES)).launch();
}

From source file: fr.jetoile.hadoopunit.integrationtest.SparkSolrIntegrationTest.java

License: Apache License

@Before
public void before() throws IOException, URISyntaxException {
    FileSystem fileSystem = HdfsUtils.INSTANCE.getFileSystem();

    fileSystem.mkdirs(new Path("/khanh/test"));
    fileSystem.mkdirs(new Path("/khanh/test_parquet"));
    fileSystem.copyFromLocalFile(
            new Path(SparkSolrIntegrationTest.class.getClassLoader().getResource("test.csv").toURI()),
            new Path("/khanh/test/test.csv"));

    new HiveSetup(HiveConnectionUtils.INSTANCE.getDestination(), Operations.sequenceOf(CREATE_TABLES)).launch();
}

From source file: fr.jetoile.hadoopunit.sample.ParquetToSolrJobIntegrationTest.java

License: Apache License

@Before
public void before() throws IOException, URISyntaxException {
    FileSystem fileSystem = HdfsUtils.INSTANCE.getFileSystem();

    fileSystem.mkdirs(new Path("/khanh/test"));
    fileSystem.mkdirs(new Path("/khanh/test_parquet"));
    fileSystem.copyFromLocalFile(
            new Path(ParquetToSolrJobIntegrationTest.class.getClassLoader().getResource("test.csv").toURI()),
            new Path("/khanh/test/test.csv"));
}

From source file: fr.jetoile.hadoopunit.sample.ParquetToSolrJobTest.java

License: Apache License

@Before
public void before() throws IOException, URISyntaxException {
    FileSystem fileSystem = HdfsUtils.INSTANCE.getFileSystem();

    fileSystem.mkdirs(new Path("/khanh/test"));
    fileSystem.mkdirs(new Path("/khanh/test_parquet"));
    fileSystem.copyFromLocalFile(
            new Path(ParquetToSolrJobTest.class.getClassLoader().getResource("test.csv").toURI()),
            new Path("/khanh/test/test.csv"));

    new HiveSetup(HiveConnectionUtils.INSTANCE.getDestination(), Operations.sequenceOf(CREATE_TABLES)).launch();
}

From source file: fr.jetoile.hadoopunit.sample.SparkJobIntegrationTest.java

License: Apache License

@Before
public void before() throws IOException, URISyntaxException {
    FileSystem fileSystem = HdfsUtils.INSTANCE.getFileSystem();

    fileSystem.mkdirs(new Path("/khanh/test"));
    fileSystem.mkdirs(new Path("/khanh/test_parquet"));
    fileSystem.copyFromLocalFile(
            new Path(SparkJobIntegrationTest.class.getClassLoader().getResource("test.csv").toURI()),
            new Path("/khanh/test/test.csv"));

    new HiveSetup(HiveConnectionUtils.INSTANCE.getDestination(), Operations.sequenceOf(CREATE_TABLES)).launch();
}

From source file: fr.jetoile.hadoopunit.sample.SparkJobTest.java

License: Apache License

@Before
public void before() throws IOException, URISyntaxException {
    FileSystem fileSystem = HdfsUtils.INSTANCE.getFileSystem();

    fileSystem.mkdirs(new Path("/khanh/test"));
    fileSystem.mkdirs(new Path("/khanh/test_parquet"));
    fileSystem.copyFromLocalFile(new Path(SparkJobTest.class.getClassLoader().getResource("test.csv").toURI()),
            new Path("/khanh/test/test.csv"));

    new HiveSetup(HiveConnectionUtils.INSTANCE.getDestination(), Operations.sequenceOf(CREATE_TABLES)).launch();
}
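
The six @Before methods above differ only in the class used to resolve the test.csv resource. As a sketch, the shared setup could be factored into a helper; HdfsTestSetup and prepareTestData are hypothetical names, not part of hadoop-unit, while HdfsUtils comes from the surrounding examples:

import java.io.IOException;
import java.net.URISyntaxException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public final class HdfsTestSetup {

    private HdfsTestSetup() {
    }

    // Recreates the shared @Before logic of the tests above.
    public static void prepareTestData(Class<?> testClass) throws IOException, URISyntaxException {
        FileSystem fileSystem = HdfsUtils.INSTANCE.getFileSystem();

        fileSystem.mkdirs(new Path("/khanh/test"));
        fileSystem.mkdirs(new Path("/khanh/test_parquet"));
        fileSystem.copyFromLocalFile(
                new Path(testClass.getClassLoader().getResource("test.csv").toURI()),
                new Path("/khanh/test/test.csv"));
    }
}

Each test's @Before would then reduce to prepareTestData(SparkJobTest.class) plus its Hive setup.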

From source file: hadoopdemo.Hadoop.java

private void addFileButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_addFileButtonActionPerformed
    try {
        String addFile = addFileTextField.getText();
        String hadoopPath = hadoopPathTextField.getText();
        System.out.println(addFile);

        Configuration conf = new Configuration();
        conf.addResource(new Path("/home/ado/hadoop-2.7.3/etc/hadoop/core-site.xml"));
        conf.addResource(new Path("/home/ado/hadoop-2.7.3/etc/hadoop/hdfs-site.xml"));
        conf.addResource(new Path("/home/ado/hadoop-2.7.3/etc/hadoop/mapred-site.xml"));

        FileSystem fileSystem = FileSystem.get(conf);
        fileSystem.copyFromLocalFile(new Path(addFile), new Path(hadoopPath));
    } catch (IOException ex) {
        Logger.getLogger(Hadoop.class.getName()).log(Level.SEVERE, null, ex);
    }
}
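
FileSystem.get returns a cached instance that may be shared across the JVM, so closing it can affect other callers. A variant of the copy above that manages its own instance could use FileSystem.newInstance with try-with-resources; a sketch, where addFile and hadoopPath are the text-field values from the handler above:

Configuration conf = new Configuration();
conf.addResource(new Path("/home/ado/hadoop-2.7.3/etc/hadoop/core-site.xml"));
conf.addResource(new Path("/home/ado/hadoop-2.7.3/etc/hadoop/hdfs-site.xml"));

// newInstance returns a non-cached FileSystem, so closing it here does not
// invalidate any shared instance that FileSystem.get hands to other code.
try (FileSystem fileSystem = FileSystem.newInstance(conf)) {
    fileSystem.copyFromLocalFile(new Path(addFile), new Path(hadoopPath));
}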

From source file: hdfs.FileSystemAPI.java

License: Open Source License

/**
 * @param args
 * @throws IOException
 */
public static void main(String[] args) throws IOException {

    String uri = "hdfs://localhost:9000/";

    Configuration conf = new Configuration();

    FileSystem fs = FileSystem.get(URI.create(uri), conf);

    fs.copyFromLocalFile(new Path("/tmp/hf"), new Path("/abc/dir1"));

}
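
FileSystem also provides overloads of copyFromLocalFile that control whether the local source is deleted and whether an existing destination is overwritten. A sketch using the same placeholder URI and paths as above:

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyFromLocalOverloads {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:9000/"), conf);

        // delSrc = false: keep the local source (same as the two-argument form).
        fs.copyFromLocalFile(false, new Path("/tmp/hf"), new Path("/abc/dir1"));

        // delSrc = true, overwrite = true: delete the local source and
        // replace the destination if it already exists.
        fs.copyFromLocalFile(true, true, new Path("/tmp/hf"), new Path("/abc/dir1"));
    }
}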

From source file: hydrograph.engine.spark.datasource.utils.AWSS3Util.java

License: Apache License

public void download(RunFileTransferEntity runFileTransferEntity) {
    log.debug("Start AWSS3Util download");

    File filecheck = new File(runFileTransferEntity.getLocalPath());
    if (runFileTransferEntity.getFailOnError())
        if (!(filecheck.exists() && filecheck.isDirectory())
                && !(runFileTransferEntity.getLocalPath().contains("hdfs://"))) {
            throw new AWSUtilException("Invalid local path");
        }
    boolean fail_if_exist = false;
    boolean done = false;
    int retryAttempt = 0;
    int i;
    String amazonFileUploadLocationOriginal = null;
    String keyName = null;
    if (runFileTransferEntity.getRetryAttempt() == 0)
        retryAttempt = 1;
    else
        retryAttempt = runFileTransferEntity.getRetryAttempt();
    for (i = 0; i < retryAttempt; i++) {
        log.info("connection attempt: " + (i + 1));
        try {

            AmazonS3 s3Client = null;
            ClientConfiguration clientConf = new ClientConfiguration();
            clientConf.setProtocol(Protocol.HTTPS);
            if (runFileTransferEntity.getCrediationalPropertiesFile() == null) {
                BasicAWSCredentials creds = new BasicAWSCredentials(runFileTransferEntity.getAccessKeyID(),
                        runFileTransferEntity.getSecretAccessKey());
                s3Client = AmazonS3ClientBuilder.standard().withClientConfiguration(clientConf)
                        .withRegion(runFileTransferEntity.getRegion())
                        .withCredentials(new AWSStaticCredentialsProvider(creds)).build();
            } else {

                File securityFile = new File(runFileTransferEntity.getCrediationalPropertiesFile());

                PropertiesCredentials creds = new PropertiesCredentials(securityFile);

                s3Client = AmazonS3ClientBuilder.standard().withClientConfiguration(clientConf)
                        .withRegion(runFileTransferEntity.getRegion())
                        .withCredentials(new AWSStaticCredentialsProvider(creds)).build();
            }
            String s3folderName = null;
            String filepath = runFileTransferEntity.getFolder_name_in_bucket();
            if (filepath.lastIndexOf("/") != -1) {
                s3folderName = filepath.substring(0, filepath.lastIndexOf("/"));
                keyName = filepath.substring(filepath.lastIndexOf("/") + 1);

            } else {

                keyName = filepath;

            }
            log.debug("keyName is: " + keyName);
            log.debug("bucket name is:" + runFileTransferEntity.getBucketName());
            log.debug("Folder Name is" + runFileTransferEntity.getFolder_name_in_bucket());
            if (s3folderName != null) {
                amazonFileUploadLocationOriginal = runFileTransferEntity.getBucketName() + "/" + s3folderName;
            } else {
                amazonFileUploadLocationOriginal = runFileTransferEntity.getBucketName();
            }
            if (runFileTransferEntity.getLocalPath().contains("hdfs://")) {
                String outputPath = runFileTransferEntity.getLocalPath();
                String s1 = outputPath.substring(7, outputPath.length());
                String s2 = s1.substring(0, s1.indexOf("/"));
                File f = new File("/tmp");
                if (!f.exists())
                    f.mkdir();

                GetObjectRequest request = new GetObjectRequest(amazonFileUploadLocationOriginal, keyName);
                S3Object object = s3Client.getObject(request);
                if (runFileTransferEntity.getEncoding() != null)
                    object.getObjectMetadata().setContentEncoding(runFileTransferEntity.getEncoding());
                File fexist = new File(runFileTransferEntity.getLocalPath() + File.separatorChar + keyName);
                if (runFileTransferEntity.getOverwrite().trim().equalsIgnoreCase("Overwrite If Exists")) {
                    S3ObjectInputStream objectContent = object.getObjectContent();
                    IOUtils.copyLarge(objectContent, new FileOutputStream("/tmp/" + keyName));
                } else {
                    if (!(fexist.exists() && !fexist.isDirectory())) {
                        S3ObjectInputStream objectContent = object.getObjectContent();
                        IOUtils.copyLarge(objectContent, new FileOutputStream(
                                runFileTransferEntity.getLocalPath() + File.separatorChar + keyName));
                    } else {
                        fail_if_exist = true;
                        Log.error("File already exists");
                        throw new AWSUtilException("File already exists");
                    }
                }

                Configuration conf = new Configuration();
                conf.set("fs.defaultFS", "hdfs://" + s2);
                FileSystem hdfsFileSystem = FileSystem.get(conf);

                String s = outputPath.substring(7, outputPath.length());
                String hdfspath = s.substring(s.indexOf("/"), s.length());

                Path local = new Path("/tmp/" + keyName);
                Path hdfs = new Path(hdfspath);
                hdfsFileSystem.copyFromLocalFile(local, hdfs);

            } else {

                GetObjectRequest request = new GetObjectRequest(amazonFileUploadLocationOriginal, keyName);
                S3Object object = s3Client.getObject(request);
                if (runFileTransferEntity.getEncoding() != null)
                    object.getObjectMetadata().setContentEncoding(runFileTransferEntity.getEncoding());
                File fexist = new File(runFileTransferEntity.getLocalPath() + File.separatorChar + keyName);
                if (runFileTransferEntity.getOverwrite().trim().equalsIgnoreCase("Overwrite If Exists")) {
                    S3ObjectInputStream objectContent = object.getObjectContent();
                    IOUtils.copyLarge(objectContent, new FileOutputStream(
                            runFileTransferEntity.getLocalPath() + File.separatorChar + keyName));
                }

                else {
                    if (!(fexist.exists() && !fexist.isDirectory())) {
                        S3ObjectInputStream objectContent = object.getObjectContent();
                        IOUtils.copyLarge(objectContent, new FileOutputStream(
                                runFileTransferEntity.getLocalPath() + File.separatorChar + keyName));
                    } else {
                        fail_if_exist = true;
                        Log.error("File already exists");
                        throw new AWSUtilException("File already exists");
                    }
                }

            }
        }

        catch (AmazonServiceException e) {
            log.error("Amazon Service Exception", e);
            if (e.getStatusCode() == 403 || e.getStatusCode() == 404) {
                if (runFileTransferEntity.getFailOnError()) {
                    Log.error("Incorrect details provided.Please provide correct details", e);
                    throw new AWSUtilException("Incorrect details provided");
                } else {
                    Log.error("Unknown amezon exception occured", e);
                }

            }

            try {
                Thread.sleep(runFileTransferEntity.getRetryAfterDuration());
            } catch (Exception e1) {
                Log.error("Exception occurred while sleeping the thread");
            }
            continue;

        } catch (Error e) {
            Log.error("Error occurred while downloading");
            throw new AWSUtilException(e);
        } catch (Exception e) {
            log.error("Error while transferring file", e);
            try {
                Thread.sleep(runFileTransferEntity.getRetryAfterDuration());
            } catch (Exception e1) {
                // Interrupted while sleeping between retries; ignore and retry.
            } catch (Error err) {
                Log.error("Error occured while downloading");
                throw new AWSUtilException(err);
            }
            continue;
        }
        done = true;
        break;
    }

    if (runFileTransferEntity.getFailOnError() && !done) {
        log.error("File transfer failed");
        throw new AWSUtilException("File transfer failed");
    } else if (!done) {
        log.error("File transfer failed but mentioned fail on error as false");
    }
    if (i == runFileTransferEntity.getRetryAttempt()) {
        if (runFileTransferEntity.getFailOnError()) {
            throw new AWSUtilException("File transfer failed");
        }
    }
    log.debug("Finished AWSS3Util download");
}
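
The hdfs:// branch above stages the S3 object in /tmp and then promotes it into HDFS with copyFromLocalFile; the FTP utility below follows the same pattern. Isolated as a sketch, with a placeholder NameNode URI and a hypothetical file name:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class StageToHdfs {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://namenode:8020"); // placeholder NameNode URI
        FileSystem hdfs = FileSystem.get(conf);

        String keyName = "payload.csv";              // hypothetical downloaded file
        Path staged = new Path("/tmp/" + keyName);   // local scratch copy
        Path target = new Path("/data/" + keyName);  // final HDFS location
        hdfs.copyFromLocalFile(staged, target);
    }
}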

From source file: hydrograph.engine.spark.datasource.utils.FTPUtil.java

License: Apache License

public void download(RunFileTransferEntity runFileTransferEntity) {
    log.debug("Start FTPUtil download");

    File filecheck = new File(runFileTransferEntity.getOutFilePath());
    if (!(filecheck.exists() && filecheck.isDirectory())
            && !(runFileTransferEntity.getOutFilePath().contains("hdfs://"))) {
        log.error("Invalid output file path. Please provide valid output file path.");
        throw new RuntimeException("Invalid output path");
    }
    boolean fail_if_exist = false;
    FTPClient ftpClient = new FTPClient();
    int retryAttempt = runFileTransferEntity.getRetryAttempt();
    int i = 0;
    boolean login = false;
    boolean done = false;
    for (i = 0; i < retryAttempt; i++) {
        try {
            log.info("Connection attempt: " + (i + 1));
            if (runFileTransferEntity.getTimeOut() != 0)
                ftpClient.setConnectTimeout(runFileTransferEntity.getTimeOut());
            log.debug("connection details: " + "/n" + "Username: " + runFileTransferEntity.getUserName() + "/n"
                    + "HostName " + runFileTransferEntity.getHostName() + "/n" + "Portno"
                    + runFileTransferEntity.getPortNo());
            ftpClient.connect(runFileTransferEntity.getHostName(), runFileTransferEntity.getPortNo());

            login = ftpClient.login(runFileTransferEntity.getUserName(), runFileTransferEntity.getPassword());

            if (!login) {
                log.error("Invalid FTP details provided. Please provide correct FTP details.");
                throw new FTPUtilException("Invalid FTP details");
            }
            ftpClient.enterLocalPassiveMode();
            if (runFileTransferEntity.getEncoding() != null)
                ftpClient.setControlEncoding(runFileTransferEntity.getEncoding());
            ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
            if (runFileTransferEntity.getOutFilePath().contains("hdfs://")) {
                log.debug("Processing for HDFS output path");
                String outputPath = runFileTransferEntity.getOutFilePath();
                String s1 = outputPath.substring(7, outputPath.length());
                String s2 = s1.substring(0, s1.indexOf("/"));
                File f = new File("/tmp");
                if (!f.exists()) {
                    f.mkdir();
                }

                int index = runFileTransferEntity.getInputFilePath()
                        .replaceAll(Matcher.quoteReplacement("\\"), "/").lastIndexOf('/');
                String file_name = runFileTransferEntity.getInputFilePath().substring(index + 1);

                File isfile = new File(runFileTransferEntity.getOutFilePath() + "\\" + file_name);
                if (runFileTransferEntity.getOverwrite().equalsIgnoreCase("Overwrite If Exists")) {

                    OutputStream outputStream = new FileOutputStream("/tmp/" + file_name);
                    done = ftpClient.retrieveFile(runFileTransferEntity.getInputFilePath(), outputStream);
                    outputStream.close();
                } else {
                    if (!(isfile.exists() && !isfile.isDirectory())) {
                        OutputStream outputStream = new FileOutputStream("/tmp/" + file_name);

                        done = ftpClient.retrieveFile(runFileTransferEntity.getInputFilePath(), outputStream);
                        outputStream.close();
                    } else {
                        fail_if_exist = true;
                        throw new RuntimeException("File already exists");
                    }
                }

                Configuration conf = new Configuration();
                conf.set("fs.defaultFS", "hdfs://" + s2);
                FileSystem hdfsFileSystem = FileSystem.get(conf);

                String s = outputPath.substring(7, outputPath.length());
                String hdfspath = s.substring(s.indexOf("/"), s.length());

                Path local = new Path("/tmp/" + file_name);
                Path hdfs = new Path(hdfspath);
                hdfsFileSystem.copyFromLocalFile(local, hdfs);

            } else {
                int index = runFileTransferEntity.getInputFilePath()
                        .replaceAll(Matcher.quoteReplacement("\\"), "/").lastIndexOf('/');
                String file_name = runFileTransferEntity.getInputFilePath().substring(index + 1);

                File isfile = new File(runFileTransferEntity.getOutFilePath() + File.separatorChar + file_name);
                if (runFileTransferEntity.getOverwrite().equalsIgnoreCase("Overwrite If Exists")) {

                    OutputStream outputStream = new FileOutputStream(runFileTransferEntity.getOutFilePath()
                            .replaceAll(Matcher.quoteReplacement("\\"), "/") + "/" + file_name);
                    done = ftpClient.retrieveFile(runFileTransferEntity.getInputFilePath(), (outputStream));
                    outputStream.close();
                } else {

                    if (!(isfile.exists() && !isfile.isDirectory())) {

                        OutputStream outputStream = new FileOutputStream(
                                runFileTransferEntity.getOutFilePath().replaceAll(
                                        Matcher.quoteReplacement("\\"), "/") + File.separatorChar + file_name);

                        done = ftpClient.retrieveFile(runFileTransferEntity.getInputFilePath(), outputStream);
                        outputStream.close();
                    } else {
                        fail_if_exist = true;
                        Log.error("File already exits");
                        throw new FTPUtilException("File already exists");

                    }

                }
            }
        } catch (Exception e) {
            log.error("error while transferring the file", e);
            if (!login) {
                log.error("Login ");

                throw new FTPUtilException("Invalid FTP details");
            }
            if (fail_if_exist) {
                log.error("File already exists ");
                throw new FTPUtilException("File already exists");
            }
            try {
                Thread.sleep(runFileTransferEntity.getRetryAfterDuration());
            } catch (Exception e1) {
                Log.error("Exception occured during sleep");
            } catch (Error err) {
                log.error("fatal error", e);
                throw new FTPUtilException(err);
            }
            continue;
        }

        break;

    }

    if (i == runFileTransferEntity.getRetryAttempt()) {

        try {
            if (ftpClient != null) {
                ftpClient.logout();
                ftpClient.disconnect();

            }
        } catch (Exception e) {

            Log.error("Exception while closing the ftp client", e);
        }
        if (runFileTransferEntity.getFailOnError())
            throw new FTPUtilException("File transfer failed ");

    }

    log.debug("Finished FTPUtil download");

}
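
Note that both utilities leave the staged copy behind in /tmp. When the scratch file is not needed afterwards, FileSystem#moveFromLocalFile (or the copyFromLocalFile overload with delSrc = true) deletes the local source after the copy; a sketch with the same placeholder paths as above:

Configuration conf = new Configuration();
conf.set("fs.defaultFS", "hdfs://namenode:8020"); // placeholder NameNode URI
FileSystem hdfs = FileSystem.get(conf);

// Copies the staged file into HDFS, then deletes the local source,
// so no /tmp cleanup is needed afterwards.
hdfs.moveFromLocalFile(new Path("/tmp/payload.csv"), new Path("/data/payload.csv"));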