Example usage for org.apache.hadoop.fs FileSystem moveFromLocalFile

List of usage examples for org.apache.hadoop.fs FileSystem moveFromLocalFile

Introduction

In this page you can find the example usage for org.apache.hadoop.fs FileSystem moveFromLocalFile.

Prototype

public void moveFromLocalFile(Path src, Path dst) throws IOException 

Source Link

Document

The src file is on the local disk.

Usage

From source file:com.asakusafw.runtime.directio.hadoop.HadoopDataSourceUtil.java

License:Apache License

/**
 * Moves every file under the source path to the corresponding location
 * under the target path, updating the counter as progress is made.
 * Existing files at a target location are deleted first, and missing
 * parent directories are created on demand (created parents are cached
 * to avoid repeated mkdirs calls).
 *
 * @param counter progress counter; incremented twice per file (once when
 *     the target slot is prepared, once after the move completes)
 * @param fromFs the file system holding the source files
 * @param from the source directory
 * @param toFs the file system receiving the files
 * @param to the target directory
 * @param fromLocal whether the source is on the local file system; when
 *     set, {@code from} must actually be local and
 *     {@code FileSystem.moveFromLocalFile} is used instead of rename
 * @throws IOException if listing, deleting, or moving any file fails
 * @throws IllegalArgumentException if any argument is {@code null}, or
 *     {@code fromLocal} is set but {@code from} is not local
 */
private static void move(Counter counter, FileSystem fromFs, Path from, FileSystem toFs, Path to,
        boolean fromLocal) throws IOException {
    if (counter == null) {
        throw new IllegalArgumentException("counter must not be null"); //$NON-NLS-1$
    }
    if (fromFs == null) {
        throw new IllegalArgumentException("fromFs must not be null"); //$NON-NLS-1$
    }
    if (from == null) {
        throw new IllegalArgumentException("from must not be null"); //$NON-NLS-1$
    }
    if (toFs == null) {
        throw new IllegalArgumentException("toFs must not be null"); //$NON-NLS-1$
    }
    if (to == null) {
        throw new IllegalArgumentException("to must not be null"); //$NON-NLS-1$
    }
    if (fromLocal && isLocalPath(from) == false) {
        throw new IllegalArgumentException("from must be on local file system"); //$NON-NLS-1$
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug(MessageFormat.format("Start moving files (from={0}, to={1})", //$NON-NLS-1$
                from, to));
    }
    Path source = fromFs.makeQualified(from);
    Path target = toFs.makeQualified(to);
    List<Path> list = createFileListRelative(counter, fromFs, source);
    if (list.isEmpty()) {
        return;
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug(MessageFormat.format("Process moving files (from={0}, to={1}, count={2})", //$NON-NLS-1$
                from, to, list.size()));
    }
    // parent directories we already created, to avoid redundant mkdirs round-trips
    Set<Path> directoryCreated = new HashSet<>();
    for (Path path : list) {
        Path sourceFile = new Path(source, path);
        Path targetFile = new Path(target, path);
        if (LOG.isTraceEnabled()) {
            FileStatus stat = fromFs.getFileStatus(sourceFile);
            LOG.trace(MessageFormat.format("Moving file (from={0}, to={1}, size={2})", //$NON-NLS-1$
                    sourceFile, targetFile, stat.getLen()));
        }
        try {
            // if the target already exists, remove it so the subsequent move can succeed
            FileStatus stat = toFs.getFileStatus(targetFile);
            if (LOG.isDebugEnabled()) {
                LOG.debug(MessageFormat.format("Deleting file: {0}", //$NON-NLS-1$
                        targetFile));
            }
            if (FileSystemCompatibility.isDirectory(stat)) {
                toFs.delete(targetFile, true);
            } else {
                toFs.delete(targetFile, false);
            }
        } catch (FileNotFoundException e) {
            // target does not exist yet: make sure its parent directory does
            Path targetParent = targetFile.getParent();
            if (directoryCreated.contains(targetParent) == false) {
                if (LOG.isDebugEnabled()) {
                    LOG.debug(MessageFormat.format("Creating directory: {0}", //$NON-NLS-1$
                            targetParent));
                }
                toFs.mkdirs(targetParent);
                directoryCreated.add(targetParent);
            }
        }
        counter.add(1);
        if (fromLocal) {
            toFs.moveFromLocalFile(sourceFile, targetFile);
        } else {
            boolean succeed = toFs.rename(sourceFile, targetFile);
            if (succeed == false) {
                throw new IOException(
                        MessageFormat.format("Failed to move file (from={0}, to={1})", sourceFile, targetFile));
            }
        }
        counter.add(1);
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug(MessageFormat.format("Finish moving files (from={0}, to={1}, count={2})", //$NON-NLS-1$
                from, to, list.size()));
    }
}

From source file:com.cloudera.kitten.lua.AsapLuaContainerLaunchParameters.java

License:Open Source License

/**
 * Moves the given local script into the container working directory on the
 * cluster file system and fills in the LocalResource metadata (size,
 * timestamp, resource URL) from the moved file.
 *
 * @param rsrc the resource descriptor to populate
 * @param path local path of the script to publish
 * @throws IOException if the move or the status lookup fails
 */
private void configureLocalScriptResourceForPath(LocalResource rsrc, Path path) throws IOException {
    FileSystem fileSystem = FileSystem.get(conf);

    // move the script from the local disk into the shared directory
    Path moved = new Path(dir + "/" + path.getName());
    fileSystem.moveFromLocalFile(path, moved);
    // qualify after the move so the resource URL is fully resolved
    Path qualified = fileSystem.makeQualified(moved);

    FileStatus status = fileSystem.getFileStatus(qualified);
    rsrc.setSize(status.getLen());
    rsrc.setTimestamp(status.getModificationTime());
    rsrc.setResource(ConverterUtils.getYarnUrlFromPath(qualified));
}

From source file:com.redsqirl.workflow.server.action.superaction.SubWorkflow.java

License:Open Source License

/**
 * Saves this super action as an XML file at the given path.
 * The XML is first written to a uniquely-named temporary local file and,
 * if that succeeds, moved onto the cluster file system (a ".srs"
 * extension is appended to the target when missing).
 *
 * @param filePath destination path for the saved file
 * @param newPrivilege privilege flag forwarded to the local XML writer
 * @return null on success, otherwise a description of the error
 * @throws RemoteException propagated from remote calls
 */
public String save(String filePath, Boolean newPrivilege) throws RemoteException {
    String error = null;
    try {
        String[] segments = filePath.split("/");
        String baseName = segments[segments.length - 1];
        // unique local staging file under the user's tmp directory
        String localTmp = WorkflowPrefManager.getPathuserpref() + "/tmp/" + baseName + "_"
                + RandomString.getRandomName(4);

        logger.info(filePath);
        error = saveXmlOnLocal(new File(localTmp), newPrivilege);

        if (error == null) {
            String target = filePath.endsWith(".srs") ? filePath : filePath + ".srs";
            NameNodeVar.getFS().moveFromLocalFile(new Path(localTmp), new Path(target));
        }
    } catch (Exception e) {
        error = "Exception while saving a Super Action";
        logger.error(error, e);
    }

    return error;
}

From source file:com.redsqirl.workflow.server.Workflow.java

License:Open Source License

/**
 * Save the xml part of a workflow.
 *
 * The document is serialized to a uniquely-named temporary local file,
 * then moved into HDFS at the given path. When the target is outside the
 * backup directory, the workflow state flags are updated and a backup
 * copy is recorded as well.
 * 
 * @param filePath
 *            the xml file path to write in.
 * @return null if OK, or a description of the error.
 * @throws RemoteException
 */
public String save(final String filePath) throws RemoteException {
    String error = null;
    File file = null;

    try {
        String[] path = filePath.split("/");
        String fileName = path[path.length - 1];
        // unique local staging file under the user's tmp directory
        String tempPath = WorkflowPrefManager.getPathuserpref() + "/tmp/" + fileName + "_"
                + RandomString.getRandomName(4);
        file = new File(tempPath);
        logger.debug("Save xml: " + file.getAbsolutePath());
        file.getParentFile().mkdirs();
        Document doc = null;
        try {
            doc = saveInXML();
        } catch (IOException e) {
            error = e.getMessage();
        }

        if (error == null) {
            logger.debug("write the file...");
            // write the content into xml file
            logger.debug("Check Null text nodes...");
            XmlUtils.checkForNullTextNodes(doc.getDocumentElement(), "");
            TransformerFactory transformerFactory = TransformerFactory.newInstance();
            Transformer transformer = transformerFactory.newTransformer();
            transformer.setOutputProperty(OutputKeys.INDENT, "yes");
            DOMSource source = new DOMSource(doc);
            StreamResult result = new StreamResult(file);
            logger.debug(4);
            transformer.transform(source, result);
            logger.debug(5);

            // move the staged file into its final HDFS location
            FileSystem fs = NameNodeVar.getFS();
            fs.moveFromLocalFile(new Path(tempPath), new Path(filePath));

            if (filePath.startsWith(WorkflowPrefManager.getBackupPath())) {
                // writing into the backup area does not count as a user save
                saved = false;
                this.path = null;
            } else {
                this.path = filePath;
                saved = true;
                changed = false;
                // record a backup copy of the freshly saved workflow
                String bckPath = getBackupName(createBackupDir());
                FileUtil.copy(fs, new Path(filePath), fs, new Path(bckPath), false, NameNodeVar.getConf());
                cleanUpBackup();
            }

            logger.debug("file saved successfully");
        }
    } catch (Exception e) {
        error = LanguageManagerWF.getText("workflow.writeXml", new Object[] { e.getMessage() });
        logger.error(error, e);
        try {
            // best effort: remove the partially written temporary file
            logger.debug("Attempt to delete " + file.getAbsolutePath());
            file.delete();
        } catch (Exception e1) {
        }
    }
    Log.flushAllLogs();

    return error;
}

From source file:kafka.etl.KafkaETLUtils.java

License:Apache License

/**
 * Writes the given text content to {@code outPath} on the target file system.
 * The content is staged in a local temporary file which is then moved (not
 * copied) into place, so the local file is removed on success.
 *
 * @param fs target file system
 * @param outPath destination path
 * @param content text to write (a trailing newline is appended)
 * @throws IOException if writing the local file or moving it fails
 */
public static void writeText(FileSystem fs, Path outPath, String content) throws IOException {
    long timestamp = System.currentTimeMillis();
    // nanoTime suffix reduces the chance of two concurrent callers
    // colliding on the same millisecond-based name
    String localFile = "/tmp/KafkaETL_tmp_" + timestamp + "_" + System.nanoTime();
    // try-with-resources closes the writer even if println fails,
    // fixing the descriptor leak in the previous version
    // NOTE(review): FileWriter uses the platform default charset — confirm UTF-8 is not required
    try (PrintWriter writer = new PrintWriter(new FileWriter(localFile))) {
        writer.println(content);
    }

    Path src = new Path(localFile);
    fs.moveFromLocalFile(src, outPath);
}

From source file:org.apache.accumulo.test.ShellServerIT.java

License:Apache License

/**
 * Shell round-trip test for {@code exporttable} / {@code importtable}:
 * exports an offline table, copies the exported files to the import
 * directory (via DistCp, or by hand when SASL is enabled), imports them
 * under a new name, and verifies that splits, table configuration and
 * constraints survived the round trip.
 */
@Test
public void exporttableImporttable() throws Exception {
    final String table = name.getMethodName(), table2 = table + "2";

    // exporttable / importtable
    ts.exec("createtable " + table + " -evc", true);
    make10();
    ts.exec("addsplits row5", true);
    ts.exec("config -t " + table + " -s table.split.threshold=345M", true);
    // the table must be offline before it can be exported
    ts.exec("offline " + table, true);
    File exportDir = new File(rootPath, "ShellServerIT.export");
    String exportUri = "file://" + exportDir.toString();
    String localTmp = "file://" + new File(rootPath, "ShellServerIT.tmp").toString();
    ts.exec("exporttable -t " + table + " " + exportUri, true);
    DistCp cp = newDistCp(new Configuration(false));
    String import_ = "file://" + new File(rootPath, "ShellServerIT.import").toString();
    if (getCluster().getClientConfig().getBoolean(ClientProperty.INSTANCE_RPC_SASL_ENABLED.getKey(), false)) {
        // DistCp bugs out trying to get a fs delegation token to perform the cp. Just copy it ourselves by hand.
        FileSystem fs = getCluster().getFileSystem();
        FileSystem localFs = FileSystem.getLocal(new Configuration(false));

        // Path on local fs to cp into
        Path localTmpPath = new Path(localTmp);
        localFs.mkdirs(localTmpPath);

        // Path in remote fs to importtable from
        Path importDir = new Path(import_);
        fs.mkdirs(importDir);

        // Implement a poor-man's DistCp: distcp.txt lists the exported files one per line
        try (BufferedReader reader = new BufferedReader(new FileReader(new File(exportDir, "distcp.txt")))) {
            for (String line; (line = reader.readLine()) != null;) {
                Path exportedFile = new Path(line);
                // There isn't a cp on FileSystem??
                log.info("Copying " + line + " to " + localTmpPath);
                fs.copyToLocalFile(exportedFile, localTmpPath);
                Path tmpFile = new Path(localTmpPath, exportedFile.getName());
                log.info("Moving " + tmpFile + " to the import directory " + importDir);
                fs.moveFromLocalFile(tmpFile, importDir);
            }
        }
    } else {
        String[] distCpArgs = new String[] { "-f", exportUri + "/distcp.txt", import_ };
        assertEquals("Failed to run distcp: " + Arrays.toString(distCpArgs), 0, cp.run(distCpArgs));
    }
    ts.exec("importtable " + table2 + " " + import_, true);
    // verify the split threshold, splits and constraint were carried over
    ts.exec("config -t " + table2 + " -np", true, "345M", true);
    ts.exec("getsplits -t " + table2, true, "row5", true);
    ts.exec("constraint --list -t " + table2, true, "VisibilityConstraint=2", true);
    ts.exec("online " + table, true);
    ts.exec("deletetable -f " + table, true);
    ts.exec("deletetable -f " + table2, true);
}

From source file:org.apache.hama.ml.kmeans.TestKMeansBSP.java

License:Apache License

/**
 * Generates partitioned text input for a k-means run: writes one
 * tab-separated "i\ti" line per point into a per-task input.txt, converts
 * each partition into a sequence file placed next to it, and removes the
 * intermediate text files.
 *
 * @param k number of centers (forwarded to prepareInputText for the first
 *     partition only; later partitions pass 0 and an empty center file)
 * @param conf job configuration
 * @param fs file system to write into
 * @param in base input directory; partitions go under {@code in}/parts
 * @param centerIn path of the center input file
 * @param out output path forwarded to prepareInputText
 * @param numBspTask number of BSP tasks, i.e. number of partitions
 * @return the directory containing the generated part*.seq files
 * @throws IOException if any file operation fails
 */
private Path generateInputText(int k, Configuration conf, FileSystem fs, Path in, Path centerIn, Path out,
        int numBspTask) throws IOException {
    int totalNumberOfPoints = 100;
    int interval = totalNumberOfPoints / numBspTask;
    Path parts = new Path(in, "parts");

    for (int part = 0; part < numBspTask; part++) {
        Path partIn = new Path(parts, "part" + part + "/input.txt");

        int start = interval * part;
        int end = start + interval - 1;
        if ((numBspTask - 1) == part) {
            // the last task absorbs the remainder of the integer division
            // NOTE(review): this makes the last range end at 100 inclusive
            // (101 points total) — confirm the off-by-one is intended
            end = totalNumberOfPoints;
        }
        System.out.println("Partition " + part + ": from " + start + " to " + end);

        // try-with-resources closes the writer even if an append fails
        // (the previous version leaked the stream on exception)
        try (BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fs.create(partIn)))) {
            for (int i = start; i <= end; i++) {
                bw.append(i + "\t" + i + "\n");
            }
        }

        // Convert input text to sequence file
        Path seqFile = null;
        if (part == 0) {
            seqFile = KMeansBSP.prepareInputText(k, conf, partIn, centerIn, out, fs, false);
        } else {
            seqFile = KMeansBSP.prepareInputText(0, conf, partIn, new Path(centerIn + "_empty.seq"), out, fs,
                    false);
        }

        // keep only the sequence file; drop the text intermediates
        fs.moveFromLocalFile(seqFile, new Path(parts, "part" + part + ".seq"));
        fs.delete(seqFile.getParent(), true);
        fs.delete(partIn.getParent(), true);
    }

    return parts;
}

From source file:org.springframework.data.hadoop.fs.FsShell.java

License:Apache License

/**
 * Moves local files to the configured file system. The arguments are
 * split by {@code parseVarargs} into a list of source paths and a single
 * destination path; all sources are moved in one call.
 *
 * @param localsrc first local source
 * @param localsrc2 second local source
 * @param dst further sources and/or the destination (per parseVarargs)
 * @throws HadoopException wrapping any I/O failure during the move
 */
public void moveFromLocal(String localsrc, String localsrc2, String... dst) {
    Object[] parsed = parseVarargs(localsrc, localsrc2, dst);
    @SuppressWarnings("unchecked")
    List<Path> sources = (List<Path>) parsed[0];
    Path destination = (Path) parsed[1];

    try {
        FileSystem destinationFs = destination.getFileSystem(configuration);
        destinationFs.moveFromLocalFile(sources.toArray(new Path[sources.size()]), destination);
    } catch (IOException ex) {
        throw new HadoopException("Cannot move resources " + ex.getMessage(), ex);
    }
}

From source file:sharedsidefunctions.CopyFromLocal.java

License:Apache License

/**
 * Stages the job inputs into the user's Hamond directory on HDFS.
 * The database file ({@code dataBase + ".dmnd"}) is moved (removed from the
 * local disk), while the query and diamond files are copied.
 *
 * @param conf Hadoop configuration used to obtain the file system
 * @param diamond local path of the diamond executable
 * @param query local path of the query file
 * @param dataBase local path (without extension) of the .dmnd database
 * @param userName HDFS user whose Hamond directory receives the files
 * @throws IOException if any transfer fails
 */
public static void copyFromLocal(Configuration conf, String diamond, String query, String dataBase,
        String userName) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    String hamondDir = "/user/" + userName + "/Hamond";

    fs.moveFromLocalFile(new Path(dataBase + ".dmnd"), new Path(hamondDir));
    fs.copyFromLocalFile(new Path(query), new Path(hamondDir));
    fs.copyFromLocalFile(new Path(diamond), new Path(hamondDir));

    // release the handle once all transfers are done
    fs.close();
}

From source file:smile.wide.facebook.ExperimentDriver.java

License:Apache License

/**
 * Runs the preparation step for the experiment: collects the attribute
 * values occurring in the dataset (through Hive JDBC or a map/reduce job),
 * rewrites the Bayes network so that the name/sex/app-user nodes carry the
 * normalized observed outcomes with random non-zero CPTs, and stores the
 * modified network in HDFS under {@code jobHDFSPath_ + "/tmp/"}.
 *
 * @return always 0; failures are logged and printed rather than propagated
 */
private int prepare() {
    // collect the values contained in the dataset
    if (useHiveJDBC_) {
        try {
            collectAttValuesFromHive();
        } catch (FileNotFoundException e) {
            s_logger.error("Hive attribute/value collection failed on file error");
            e.printStackTrace();
        } catch (SQLException e) {
            s_logger.error("Hive attribute/value collection failed on SQL problem.");
            e.printStackTrace();
        }
    } else {
        collectAttValuesByMRJob();
    }

    // load the network file
    Network theNet = new Network();
    theNet.readFile(networkFile_);

    // modify it using the collected attributes
    // update the possible outcomes for every node
    for (String var : new String[] { "FirstName", "MiddleName", "Sex", "IsAppUser" }) {
        String[] origOutcomes = theNet.getOutcomeIds(var);

        s_logger.debug("In the data, " + attValueCounts_.get(var).size() + " outcomes for " + var);

        HashSet<String> normalizedValues = new HashSet<String>();

        // keep only values that normalize to valid outcome identifiers
        // (letter/underscore start, word characters after)
        for (String attval : attValueCounts_.get(var).keySet()) {
            String trimmed = attval.replace(".", "").trim().toLowerCase();
            if (trimmed.matches("[a-zA-Z_]\\w*")) {
                normalizedValues.add(trimmed);
            }
        }

        s_logger.debug("Normalized " + attValueCounts_.get(var).size() + " outcomes for " + var + " to "
                + normalizedValues.size());

        int i = 0;
        for (String av : normalizedValues) {
            theNet.addOutcome(var, av);
            ++i;
            if (i % 10000 == 0) {
                s_logger.info("Adding attribute # " + i + " - " + av);
            }

        }

        s_logger.debug("Deleting the original outcomes from " + var);
        // drop stale outcomes, but never shrink a node below 2 outcomes
        for (String out : origOutcomes) {
            if (!normalizedValues.contains(out) && (theNet.getOutcomeCount(var) > 2)) {
                theNet.deleteOutcome(var, out);
            }
        }

        s_logger.debug("Setting non-zero CPT for " + var);
        // make all outcomes possible
        double[] def = theNet.getNodeDefinition(var);
        int outcomes = theNet.getOutcomeCount(var);
        int parentconf = def.length / outcomes; // how many parent configurations
        Random r = new Random();
        // for each parent configuration, draw random positive weights
        // (+0.01 keeps every outcome strictly possible) and normalize to sum 1
        for (int p = 0; p < parentconf; ++p) {
            double sum = 0.0;
            double[] par = new double[outcomes];
            for (int j = 0; j < outcomes; ++j) {
                par[j] = r.nextDouble() + 0.01;
                sum += par[j];
            }
            for (int j = 0; j < outcomes; ++j) {
                def[p * outcomes + j] = par[j] / sum;
            }
        }
        theNet.setNodeDefinition(var, def);
    }

    // save a modified copy, e.g. "net.xdsl" -> "net.mod.xdsl"
    modifiedNetwork_ = basename(networkFile_);
    modifiedNetwork_ = modifiedNetwork_.substring(0, modifiedNetwork_.lastIndexOf(".")) + ".mod.xdsl";
    try {
        theNet.writeFile(modifiedNetwork_);
        // publish the modified network into the job's HDFS tmp directory
        FileSystem fs = FileSystem.get(conf_);
        fs.mkdirs(new Path(jobHDFSPath_ + "/tmp/"));
        fs.moveFromLocalFile(new Path(modifiedNetwork_), new Path(jobHDFSPath_ + "/tmp/" + modifiedNetwork_));
    } catch (IOException e) {
        s_logger.error("I/O Error recording the modified Bayes network " + modifiedNetwork_ + " to "
                + jobHDFSPath_ + "/tmp/" + modifiedNetwork_);
        e.printStackTrace();
    }

    return 0;
}