Example usage for org.apache.hadoop.fs FileSystem copyFromLocalFile

Introduction

This page collects example usages of org.apache.hadoop.fs.FileSystem.copyFromLocalFile, taken from open-source Apache projects.

Prototype

public void copyFromLocalFile(Path src, Path dst) throws IOException 

Document

The src file is on the local disk. Add it to the filesystem at the given dst name; the source file is kept intact afterwards.
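
As a quick orientation before the project examples below, here is a minimal, self-contained sketch of the two-argument overload. The file paths are hypothetical, and the Configuration is assumed to pick up a valid core-site.xml from the classpath.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyFromLocalExample {
    public static void main(String[] args) throws IOException {
        // Loads core-site.xml / hdfs-site.xml from the classpath, if present.
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        // Hypothetical paths, for illustration only.
        Path src = new Path("/tmp/report.csv");       // file on the local disk
        Path dst = new Path("/user/demo/report.csv"); // destination on the target filesystem

        // Copies src to dst; the local source is kept intact.
        fs.copyFromLocalFile(src, dst);

        fs.close();
    }
}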

Usage

From source file: org.apache.accumulo.test.ShellServerIT.java

License: Apache License

private void setupRealContextPath() throws Exception {
    // Copy the test iterators jar to tmp
    Path baseDir = new Path(System.getProperty("user.dir"));
    Path targetDir = new Path(baseDir, "target");
    Path jarPath = new Path(targetDir, "TestJar-Iterators.jar");
    Path dstPath = new Path(REAL_CONTEXT_CLASSPATH);
    FileSystem fs = SharedMiniClusterBase.getCluster().getFileSystem();
    fs.copyFromLocalFile(jarPath, dstPath);
}

From source file: org.apache.accumulo.test.ShellServerIT.java

License: Apache License

private void setupFakeContextPath() throws Exception {
    // Copy the test iterators jar to tmp
    Path baseDir = new Path(System.getProperty("user.dir"));
    Path targetDir = new Path(baseDir, "target");
    Path classesDir = new Path(targetDir, "classes");
    Path jarPath = new Path(classesDir, "ShellServerIT-iterators.jar");
    Path dstPath = new Path(FAKE_CONTEXT_CLASSPATH);
    FileSystem fs = SharedMiniClusterBase.getCluster().getFileSystem();
    fs.copyFromLocalFile(jarPath, dstPath);
}

From source file: org.apache.airavata.gfac.hadoop.handler.HDFSDataMovementHandler.java

License: Apache License

private void handleInPath(JobExecutionContext jobExecutionContext) throws GFacHandlerException, IOException {
    ApplicationDeploymentDescriptionType appDepDesc = jobExecutionContext.getApplicationContext()
            .getApplicationDeploymentDescription().getType();
    HadoopApplicationDeploymentDescriptionType hadoopAppDesc = (HadoopApplicationDeploymentDescriptionType) appDepDesc;
    if (appDepDesc.isSetInputDataDirectory() && isInputDataDirectoryLocal(appDepDesc)) {
        Configuration hadoopConf = HadoopUtils.createHadoopConfiguration(jobExecutionContext,
                isWhirrBasedDeployment, hadoopConfigDir);
        FileSystem hdfs = FileSystem.get(hadoopConf);
        hdfs.copyFromLocalFile(new Path(appDepDesc.getInputDataDirectory()),
                new Path(hadoopAppDesc.getHadoopJobConfiguration().getHdfsInputDirectory()));
    }
}

From source file: org.apache.ambari.fast_hdfs_resource.Resource.java

License: Apache License

public static void createResource(Resource resource, FileSystem dfs, Path pathHadoop) throws IOException {

    boolean isCreate = (resource.getSource() == null);

    if (isCreate && resource.getType().equals("directory")) {
        dfs.mkdirs(pathHadoop); // empty dir(s)
    } else if (isCreate && resource.getType().equals("file")) {
        dfs.createNewFile(pathHadoop); // empty file
    } else {
        if (dfs.exists(pathHadoop) && dfs.getFileStatus(pathHadoop).isDir()) {
            System.out.println(
                    "Skipping copy from local, as target " + pathHadoop + " is an existing directory."); // Copy from local to existing directory is not supported by dfs.
        } else {
            dfs.copyFromLocalFile(new Path(resource.getSource()), pathHadoop);
        }
    }
}

From source file: org.apache.crunch.io.hbase.WordCountHBaseIT.java

License: Apache License

@Before
public void setUp() throws Exception {
    Configuration conf = hbaseTestUtil.getConfiguration();
    conf.set("hadoop.log.dir", tmpDir.getFileName("logs"));
    conf.set("hadoop.tmp.dir", tmpDir.getFileName("hadoop-tmp"));
    conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1");
    conf.setInt("hbase.master.info.port", -1);
    conf.setInt("hbase.regionserver.info.port", -1);

    // Workaround for HBASE-5711, we need to set config value dfs.datanode.data.dir.perm
    // equal to the permissions of the temp dirs on the filesystem. These temp dirs were
    // probably created using this process' umask. So we guess the temp dir permissions as
    // 0777 & ~umask, and use that to set the config value.
    try {
        Process process = Runtime.getRuntime().exec("/bin/sh -c umask");
        BufferedReader br = new BufferedReader(new InputStreamReader(process.getInputStream()));
        int rc = process.waitFor();
        if (rc == 0) {
            String umask = br.readLine();

            int umaskBits = Integer.parseInt(umask, 8);
            int permBits = 0x1ff & ~umaskBits;
            String perms = Integer.toString(permBits, 8);

            conf.set("dfs.datanode.data.dir.perm", perms);
        }
    } catch (Exception e) {
        // ignore errors, we might not be running on POSIX, or "sh" might not be on the path
    }

    hbaseTestUtil.startMiniZKCluster();
    hbaseTestUtil.startMiniCluster();
    hbaseTestUtil.startMiniMapReduceCluster(1);

    // For Hadoop-2.0.0, we have to do a bit more work.
    if (TaskAttemptContext.class.isInterface()) {
        conf = hbaseTestUtil.getConfiguration();
        FileSystem fs = FileSystem.get(conf);
        Path tmpPath = new Path("target", "WordCountHBaseTest-tmpDir");
        FileSystem localFS = FileSystem.getLocal(conf);
        for (FileStatus jarFile : localFS.listStatus(new Path("target/lib/"))) {
            Path target = new Path(tmpPath, jarFile.getPath().getName());
            fs.copyFromLocalFile(jarFile.getPath(), target);
            DistributedCache.addFileToClassPath(target, conf, fs);
        }

        // Create a programmatic container for this jar.
        JarOutputStream jos = new JarOutputStream(new FileOutputStream("WordCountHBaseIT.jar"));
        File baseDir = new File("target/test-classes");
        String prefix = "org/apache/crunch/io/hbase/";
        jarUp(jos, baseDir, prefix + "WordCountHBaseIT.class");
        jarUp(jos, baseDir, prefix + "WordCountHBaseIT$1.class");
        jarUp(jos, baseDir, prefix + "WordCountHBaseIT$2.class");
        jarUp(jos, baseDir, prefix + "WordCountHBaseIT$3.class");
        jarUp(jos, baseDir, prefix + "WordCountHBaseIT$StringifyFn.class");

        // Now for the OutputFormat (doesn't get copied by default, apparently)
        baseDir = new File("target/classes");
        jarUp(jos, baseDir, prefix + "TableOutputFormat.class");
        jarUp(jos, baseDir, prefix + "TableOutputFormat$TableRecordWriter.class");
        jos.close();

        Path target = new Path(tmpPath, "WordCountHBaseIT.jar");
        fs.copyFromLocalFile(true, new Path("WordCountHBaseIT.jar"), target);
        DistributedCache.addFileToClassPath(target, conf, fs);
    }
}
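
Note that the final copy above uses the three-argument overload, copyFromLocalFile(boolean delSrc, Path src, Path dst): passing delSrc=true deletes the local WordCountHBaseIT.jar after it has been copied to the cluster filesystem, keeping the working directory clean between runs.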

From source file: org.apache.crunch.WordCountHBaseTest.java

License: Apache License

@SuppressWarnings("deprecation")
@Before
public void setUp() throws Exception {
    Configuration conf = hbaseTestUtil.getConfiguration();
    File tmpDir = File.createTempFile("logdir", "");
    tmpDir.delete();
    tmpDir.mkdir();
    tmpDir.deleteOnExit();
    conf.set("hadoop.log.dir", tmpDir.getAbsolutePath());
    conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1");
    conf.setInt("hbase.master.info.port", -1);
    conf.setInt("hbase.regionserver.info.port", -1);

    hbaseTestUtil.startMiniZKCluster();
    hbaseTestUtil.startMiniCluster();
    hbaseTestUtil.startMiniMapReduceCluster(1);

    // For Hadoop-2.0.0, we have to do a bit more work.
    if (TaskAttemptContext.class.isInterface()) {
        conf = hbaseTestUtil.getConfiguration();
        FileSystem fs = FileSystem.get(conf);
        Path tmpPath = new Path("target", "WordCountHBaseTest-tmpDir");
        FileSystem localFS = FileSystem.getLocal(conf);
        for (FileStatus jarFile : localFS.listStatus(new Path("target/lib/"))) {
            Path target = new Path(tmpPath, jarFile.getPath().getName());
            fs.copyFromLocalFile(jarFile.getPath(), target);
            DistributedCache.addFileToClassPath(target, conf, fs);
        }

        // Create a programmatic container for this jar.
        JarOutputStream jos = new JarOutputStream(new FileOutputStream("WordCountHBaseTest.jar"));
        File baseDir = new File("target/test-classes");
        String prefix = "org/apache/crunch/";
        jarUp(jos, baseDir, prefix + "WordCountHBaseTest.class");
        jarUp(jos, baseDir, prefix + "WordCountHBaseTest$1.class");
        jarUp(jos, baseDir, prefix + "WordCountHBaseTest$2.class");
        jos.close();

        Path target = new Path(tmpPath, "WordCountHBaseTest.jar");
        fs.copyFromLocalFile(true, new Path("WordCountHBaseTest.jar"), target);
        DistributedCache.addFileToClassPath(target, conf, fs);
    }
}

From source file: org.apache.drill.TestDynamicUDFSupport.java

License: Apache License

private void copyJar(FileSystem fs, Path src, Path dest, String name) throws IOException {
    Path jarPath = new Path(src, name);
    fs.copyFromLocalFile(jarPath, dest);
}

From source file: org.apache.falcon.listener.HadoopStartupListener.java

License: Apache License

private void copyShareLib() throws Exception {
    Path shareLibFSPath = new Path(
            getShareLibPath() + File.separator + SHARE_LIB_PREFIX + getTimestampDirectory());
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(shareLibFSPath.toUri(), conf);
    if (!fs.exists(shareLibFSPath)) {
        fs.mkdirs(shareLibFSPath);
    }

    String[] actionDirectories = getLibActionDirectories();
    for (String actionDirectory : actionDirectories) {
        LOG.info("Copying Action Directory: {}", actionDirectory);
        fs.copyFromLocalFile(new Path(shareLibPath, actionDirectory), shareLibFSPath);
    }
}

From source file: org.apache.falcon.regression.core.util.HadoopUtil.java

License: Apache License

/**
 * Copies a file from the local filesystem to an HDFS location.
 * @param fs target filesystem
 * @param dstHdfsDir destination directory on HDFS
 * @param srcFileLocation source file location on the local filesystem
 * @throws IOException
 */
public static void copyDataToFolder(final FileSystem fs, String dstHdfsDir, final String srcFileLocation)
        throws IOException {
    LOGGER.info(String.format("Copying local dir %s to hdfs location %s on %s", srcFileLocation, dstHdfsDir,
            fs.getUri()));
    fs.copyFromLocalFile(new Path(srcFileLocation), new Path(cutProtocol(dstHdfsDir)));
}

From source file: org.apache.falcon.regression.core.util.HadoopUtil.java

License: Apache License

/**
 * Copies data from local sources to remote directories.
 * @param fs target filesystem
 * @param folderPrefix prefix for remote directories
 * @param folderList remote directories
 * @param fileLocations sources
 * @throws IOException
 */
public static void copyDataToFolders(FileSystem fs, final String folderPrefix, List<String> folderList,
        String... fileLocations) throws IOException {
    for (final String folder : folderList) {
        String folderSpace = folder.replaceAll("/", "_");
        File file = new File(OSUtil.NORMAL_INPUT + folderSpace + ".txt");
        FileUtils.writeStringToFile(file, "folder", true);
        fs.copyFromLocalFile(new Path(file.getAbsolutePath()), new Path(folderPrefix + folder));
        if (!file.delete()) {
            LOGGER.info("delete was not successful for file: " + file);
        }
        Path[] srcPaths = new Path[fileLocations.length];
        for (int i = 0; i < srcPaths.length; ++i) {
            srcPaths[i] = new Path(fileLocations[i]);
        }
        LOGGER.info(String.format("copying  %s to %s%s on %s", Arrays.toString(srcPaths), folderPrefix, folder,
                fs.getUri()));
        fs.copyFromLocalFile(false, true, srcPaths, new Path(folderPrefix + folder));
    }
}
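
The last call above uses the array overload, copyFromLocalFile(boolean delSrc, boolean overwrite, Path[] srcs, Path dst), which copies several local sources into a single destination directory in one call; here delSrc=false preserves the local files and overwrite=true replaces any files already present at the destination.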