Example usage for org.apache.hadoop.fs FileUtil symLink

List of usage examples for org.apache.hadoop.fs FileUtil symLink

Introduction

On this page you can find example usage for org.apache.hadoop.fs FileUtil symLink.

Prototype

public static int symLink(String target, String linkname) throws IOException 

Document

Create a soft link between a src and destination only on a local disk.
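
The usage examples below all check the integer return code from FileUtil.symLink. As a minimal, self-contained sketch (the paths are hypothetical and chosen only for illustration), a call looks like this:

import java.io.IOException;

import org.apache.hadoop.fs.FileUtil;

public class SymLinkExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical paths, used only for illustration.
        String target = "/tmp/symlink-example/original.txt";
        String link = "/tmp/symlink-example/original-link.txt";

        // symLink returns 0 on success and a non-zero code on failure,
        // which is why the usage examples below test its return value.
        if (0 != FileUtil.symLink(target, link)) {
            System.err.println("Failed to create symlink: " + target + " <- " + link);
        }
    }
}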

Usage

From source file: com.scaleoutsoftware.soss.hserver.hadoop.DistributedCacheManager.java

License: Apache License

/**
 * Utility method for creating a symlink and warning on errors.
 *
 * If link is null, does nothing.
 */
private void symlink(File workDir, String target, String link) throws IOException {
    if (link != null) {
        link = workDir.toString() + Path.SEPARATOR + link;
        File flink = new File(link);

        //CODE CHANGE FROM ORIGINAL FILE, BUG FIX:
        //
        //If the cleanup of the previous job failed for some reason, we can have a lingering symlink
        //pointing either to the obsolete file (in that case flink.exists() == true) or to a non-existent
        //file (flink.exists() == false). In the second case, the original code tried to create the symlink
        //anyway, causing an "already exists" error. In the first case, this method used to do nothing
        //without logging it, which effectively left the old symlink in place, leading to
        //elusive bugs.
        //
        //Changes:
        //1. Try to delete the symlink, and log if there was a symlink to delete (it means something went wrong with cleanup).
        //2. Remove the if (!flink.exists()) check before creating the symlink.
        if (flink.delete()) {
            LOG.warn(String.format("Symlink already existed, deleting: %s <- %s", target, link));
        }

        LOG.info(String.format("Creating symlink: %s <- %s", target, link));
        if (0 != FileUtil.symLink(target, link)) {
            LOG.warn(String.format("Failed to create symlink: %s <- %s", target, link));
        } else {
            symlinksCreated.add(new File(link));
        }

    }
}

From source file: com.splunk.shuttl.prototype.symlink.BucketBlockSymlinkPrototypeTest.java

License: Apache License

private void doSymlinkPathInDir(File fileInDir, LocatedBlocks blockLocations, List<LocatedBlock> locatedBlocks)
        throws IOException {
    assertEquals(1, locatedBlocks.size());
    LocatedBlock locatedBlock = blockLocations.get(0);
    assertEquals(1, locatedBlock.getLocations().length);

    DatanodeInfo datanodeInfo = locatedBlock.getLocations()[0];
    ClientDatanodeProtocol createClientDatanodeProtocolProxy = HadoopFileLocationPrototypeTest
            .createClientDatanodeProtocolProxy(datanodeInfo, hadoopFileSystem.getConf(), 1000);

    BlockLocalPathInfo blockLocalPathInfo = createClientDatanodeProtocolProxy
            .getBlockLocalPathInfo(locatedBlock.getBlock(), locatedBlock.getBlockToken());
    String absolutePathToBlock = blockLocalPathInfo.getBlockPath();
    assertTrue(new File(absolutePathToBlock).exists());
    FileUtil.symLink(absolutePathToBlock, fileInDir.getAbsolutePath());
}

From source file: com.splunk.shuttl.prototype.symlink.SymLinkTest.java

License: Apache License

@Test(groups = { "fast-unit" })
public void symLink_givenExistingFile_symLinkFileToTheExistingFile() throws IOException {
    File existingFile = createFile();
    assertTrue(existingFile.exists());
    File theSymLink = createFilePath();
    assertFalse(theSymLink.exists());
    FileUtil.symLink(existingFile.getAbsolutePath(), theSymLink.getAbsolutePath());
    assertTrue(theSymLink.exists());
    assertNotEquals(existingFile.getAbsolutePath(), theSymLink.getAbsolutePath());
    assertEquals(existingFile.getCanonicalPath(), theSymLink.getCanonicalPath());
}

From source file: org.apache.hama.pipes.util.DistributedCacheUtil.java

License: Apache License

/**
 * Transfers DistributedCache files into the local cache files. Also creates
 * symbolic links for URIs specified with a fragment if
 * DistributedCache.getSymlinks() is true.
 *
 * @param conf The job's configuration
 * @throws IOException If a DistributedCache file cannot be found.
 */
public static final void moveLocalFiles(Configuration conf) throws IOException {
    StringBuilder files = new StringBuilder();
    boolean first = true;
    if (DistributedCache.getCacheFiles(conf) != null) {
        for (URI uri : DistributedCache.getCacheFiles(conf)) {
            if (uri != null) {
                if (!first) {
                    files.append(",");
                }
                if (null != uri.getFragment() && DistributedCache.getSymlink(conf)) {

                    FileUtil.symLink(uri.getPath(), uri.getFragment());
                    files.append(uri.getFragment()).append(",");
                }
                FileSystem hdfs = FileSystem.get(conf);
                Path pathSrc = new Path(uri.getPath());
                // LOG.info("pathSrc: " + pathSrc);
                if (hdfs.exists(pathSrc)) {
                    LocalFileSystem local = LocalFileSystem.getLocal(conf);
                    Path pathDst = new Path(local.getWorkingDirectory(), pathSrc.getName());
                    // LOG.info("user.dir: "+System.getProperty("user.dir"));
                    // LOG.info("WorkingDirectory: "+local.getWorkingDirectory());
                    // LOG.info("pathDst: " + pathDst);
                    LOG.debug("copyToLocalFile: " + pathDst);
                    hdfs.copyToLocalFile(pathSrc, pathDst);
                    local.deleteOnExit(pathDst);
                    files.append(pathDst.toUri().getPath());
                }
                first = false;
            }
        }
    }
    if (files.length() > 0) {
        // I've replaced the use of the missing setLocalFiles and
        // addLocalFiles methods (hadoop 0.23.x) with our own DistCacheUtils
        // methods which set the cache configurations directly.
        DistCacheUtils.addLocalFiles(conf, files.toString());
    }
}
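
To make the fragment-to-symlink behavior of moveLocalFiles concrete, here is a minimal, hedged sketch of how a job might register a cache file whose URI fragment becomes the name of the local symlink. The HDFS path and fragment are hypothetical, and the deprecated DistributedCache API is shown because it is what the example above reads from:

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;

public class CacheSymlinkSetup {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // Hypothetical HDFS file; the "#dict" fragment is the name of the local symlink.
        DistributedCache.addCacheFile(new URI("hdfs:///data/dictionaries/en.txt#dict"), conf);
        DistributedCache.createSymlink(conf);

        // With this configuration, moveLocalFiles(conf) above would call
        // FileUtil.symLink("/data/dictionaries/en.txt", "dict") for this entry,
        // because the URI has a fragment and DistributedCache.getSymlink(conf) is true.
    }
}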