Example usage for org.apache.hadoop.yarn.api.records LocalResource newInstance

Introduction

On this page you can find example usages of org.apache.hadoop.yarn.api.records.LocalResource.newInstance.

Prototype

@Public
@Stable
public static LocalResource newInstance(URL url, LocalResourceType type, LocalResourceVisibility visibility,
        long size, long timestamp)
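
Before the examples taken from real projects below, here is a minimal, self-contained sketch of the typical call pattern. The size and timestamp passed to newInstance must describe the file as it exists on the shared file system, so they are read from a FileStatus. The HDFS path and the "app.jar" resource key are placeholders, not taken from any example on this page.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.util.ConverterUtils;

public class LocalResourceSketch {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path jarOnHdfs = new Path(fs.getHomeDirectory(), "app/app.jar"); // placeholder path

        // size and timestamp must match the file on HDFS, so take them from FileStatus
        FileStatus status = fs.getFileStatus(jarOnHdfs);

        LocalResource resource = LocalResource.newInstance(
                ConverterUtils.getYarnUrlFromPath(jarOnHdfs),
                LocalResourceType.FILE,
                LocalResourceVisibility.APPLICATION,
                status.getLen(),
                status.getModificationTime());

        // the map key becomes the link name inside the container's working directory
        Map<String, LocalResource> localResources = new HashMap<String, LocalResource>();
        localResources.put("app.jar", resource);
    }
}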

Usage

From source file:cascading.flow.tez.util.TezUtil.java

License:Open Source License

protected static void addResource(Map<String, LocalResource> localResources, Map<String, String> environment,
        String fileName, FileStatus stats, Path fullPath, LocalResourceType type) throws IOException {
    if (localResources.containsKey(fileName))
        throw new FlowException("duplicate filename added to classpath resources: " + fileName);

    URL yarnUrlFromPath = ConverterUtils.getYarnUrlFromPath(fullPath);
    long len = stats.getLen();
    long modificationTime = stats.getModificationTime();

    LocalResource resource = LocalResource.newInstance(yarnUrlFromPath, type,
            LocalResourceVisibility.APPLICATION, len, modificationTime);

    if (type == LocalResourceType.PATTERN) {
        // todo: parametrize this for dynamic inclusion below
        String pattern = "(?:classes/|lib/).*";

        resource.setPattern(pattern);

        if (environment != null) {
            String current = "";

            current += PWD.$$() + File.separator + fileName + File.separator + "*" + CLASS_PATH_SEPARATOR;
            current += PWD.$$() + File.separator + fileName + File.separator + "lib" + File.separator + "*"
                    + CLASS_PATH_SEPARATOR;
            current += PWD.$$() + File.separator + fileName + File.separator + "classes" + File.separator + "*"
                    + CLASS_PATH_SEPARATOR;

            String classPath = environment.get(CLASSPATH.name());

            if (classPath == null)
                classPath = "";
            else if (!classPath.startsWith(CLASS_PATH_SEPARATOR))
                classPath += CLASS_PATH_SEPARATOR;

            classPath += current;

            LOG.info("adding to cluster side classpath: {} ", classPath);

            environment.put(CLASSPATH.name(), classPath);
        }
    }

    localResources.put(fileName, resource);
}

From source file:com.bigjob.Client.java

License:Apache License

private void addToLocalResources(FileSystem fs, String fileSrcPath, String fileDstPath, int appId,
        Map<String, LocalResource> localResources, String resources) throws IOException {
    String suffix = appName + "/" + appId + "/" + fileDstPath;
    Path dst = new Path(fs.getHomeDirectory(), suffix);
    LOG.debug("HDFS Destination for Script: " + dst.toString());
    if (fileSrcPath == null) {
        FSDataOutputStream ostream = null;
        try {
            ostream = FileSystem.create(fs, dst, new FsPermission((short) 0710));
            ostream.writeUTF(resources);
        } finally {
            IOUtils.closeQuietly(ostream);
        }
    } else {
        fs.copyFromLocalFile(new Path(fileSrcPath), dst);
    }
    FileStatus scFileStatus = fs.getFileStatus(dst);
    LocalResource scRsrc = LocalResource.newInstance(ConverterUtils.getYarnUrlFromURI(dst.toUri()),
            LocalResourceType.FILE, LocalResourceVisibility.APPLICATION, scFileStatus.getLen(),
            scFileStatus.getModificationTime());
    localResources.put(fileDstPath, scRsrc);
}

From source file:com.epam.hadoop.nv.yarn.Client.java

License:Apache License

private void addToLocalResources(FileSystem fs, String fileSrcPath, String fileDstPath, String appId,
        Map<String, LocalResource> localResources, String resources) throws IOException {
    String suffix = appName + "/" + appId + "/" + fileDstPath;
    Path dst = new Path(fs.getHomeDirectory(), suffix);
    if (fileSrcPath == null) {
        FSDataOutputStream ostream = null;
        try {
            ostream = FileSystem.create(fs, dst, new FsPermission((short) 0710));
            ostream.writeUTF(resources);
        } finally {
            IOUtils.closeQuietly(ostream);
        }
    } else {
        fs.copyFromLocalFile(new Path(fileSrcPath), dst);
    }
    FileStatus scFileStatus = fs.getFileStatus(dst);
    LocalResource scRsrc = LocalResource.newInstance(ConverterUtils.getYarnUrlFromURI(dst.toUri()),
            LocalResourceType.FILE, LocalResourceVisibility.APPLICATION, scFileStatus.getLen(),
            scFileStatus.getModificationTime());
    localResources.put(fileDstPath, scRsrc);
}

From source file:com.github.hdl.tensorflow.yarn.app.TFAmContainer.java

License:Apache License

public void addToLocalResources(FileSystem fs, Path dst, String fileDstPath,
        Map<String, LocalResource> localResources) throws IOException {
    FileStatus scFileStatus = fs.getFileStatus(dst);
    LocalResource scRsrc = LocalResource.newInstance(URL.fromURI(dst.toUri()), LocalResourceType.FILE,
            LocalResourceVisibility.APPLICATION, scFileStatus.getLen(), scFileStatus.getModificationTime());
    localResources.put(fileDstPath, scRsrc);
}

From source file:com.github.hdl.tensorflow.yarn.app.TFContainer.java

License:Apache License

public void addToLocalResources(FileSystem fs, Path dst, String fileDstPath,
        Map<String, LocalResource> localResources) throws IOException {
    FileStatus scFileStatus = fs.getFileStatus(dst);
    LOG.info("Path " + dst.toString() + "->" + " " + fileDstPath);
    LocalResource scRsrc = LocalResource.newInstance(URL.fromURI(dst.toUri()), LocalResourceType.FILE,
            LocalResourceVisibility.APPLICATION, scFileStatus.getLen(), scFileStatus.getModificationTime());
    localResources.put(fileDstPath, scRsrc);
}

From source file:com.github.hdl.tensorflow.yarn.app.TFContainer.java

License:Apache License

public void addToLocalResources(FileSystem fs, String fileSrcPath, String fileDstPath, String appId,
        Map<String, LocalResource> localResources, String resources) throws IOException {

    execCmd("pwd");
    execCmd("ls -l");
    String suffix = appName + "/" + appId + "/" + fileDstPath;
    Path dst = new Path(fs.getHomeDirectory(), suffix);
    LOG.info("copy: " + fileSrcPath + " ===> " + dst.toString());
    if (fileSrcPath == null) {
        FSDataOutputStream ostream = null;
        try {
            ostream = FileSystem.create(fs, dst, new FsPermission((short) 0710));
            ostream.writeUTF(resources);
        } finally {
            IOUtils.closeQuietly(ostream);
        }
    } else {
        fs.copyFromLocalFile(new Path(fileSrcPath), dst);
    }

    FileStatus scFileStatus = fs.getFileStatus(dst);
    LocalResource scRsrc = LocalResource.newInstance(URL.fromURI(dst.toUri()), LocalResourceType.FILE,
            LocalResourceVisibility.APPLICATION, scFileStatus.getLen(), scFileStatus.getModificationTime());
    localResources.put(fileDstPath, scRsrc);
}

From source file:com.twitter.pig.backend.hadoop.executionengine.tez.TezJobControlCompiler.java

License:Apache License

public DAG createDAG(TezOperPlan tezPlan, FileSystem remoteFs, TezConfiguration conf, ApplicationId appId,
        Path remoteStagingDir) throws IOException, YarnException {

    DAG dag = new DAG("MRRSleepJob");
    /*
          String jarPath = ClassUtil.findContainingJar(getClass());
          Path remoteJarPath = remoteFs.makeQualified(
    new Path(remoteStagingDir, "dag_job.jar"));
          remoteFs.copyFromLocalFile(new Path(jarPath), remoteJarPath);
          FileStatus jarFileStatus = remoteFs.getFileStatus(remoteJarPath);
    */
    Map<String, LocalResource> commonLocalResources = new HashMap<String, LocalResource>();

    if (!pigContext.inIllustrator && pigContext.getExecType() != ExecType.TEZ_LOCAL) {

        // Setup the DistributedCache for this job
        for (URL extraJar : pigContext.extraJars) {
            //log.debug("Adding jar to DistributedCache: " + extraJar.toString());
            TezJobControlCompiler.putJarOnClassPathThroughDistributedCache(pigContext, conf, extraJar);
        }

        //Create the jar of all functions and classes required
        File submitJarFile = File.createTempFile("Job", ".jar");
        //log.info("creating jar file "+submitJarFile.getName());
        // ensure the job jar is deleted on exit
        submitJarFile.deleteOnExit();
        FileOutputStream fos = new FileOutputStream(submitJarFile);
        try {
            JarManager.createJar(fos, new HashSet<String>(), pigContext);
        } catch (ClassNotFoundException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

        Path remoteJarPath = remoteFs.makeQualified(new Path(remoteStagingDir, "dag_job.jar"));
        remoteFs.copyFromLocalFile(new Path(submitJarFile.getAbsolutePath()), remoteJarPath);
        FileStatus jarFileStatus = remoteFs.getFileStatus(remoteJarPath);

        LocalResource dagJarLocalRsrc = LocalResource.newInstance(
                ConverterUtils.getYarnUrlFromPath(remoteJarPath), LocalResourceType.FILE,
                LocalResourceVisibility.APPLICATION, jarFileStatus.getLen(),
                jarFileStatus.getModificationTime());
        commonLocalResources.put("dag_job.jar", dagJarLocalRsrc);

        Path remoteTezJarPath = remoteFs.makeQualified(new Path(remoteStagingDir, "pig-tez.jar"));
        remoteFs.copyFromLocalFile(new Path("pig-tez.jar"), remoteTezJarPath);
        FileStatus tezJarFileStatus = remoteFs.getFileStatus(remoteTezJarPath);

        LocalResource tezJarLocalRsrc = LocalResource.newInstance(
                ConverterUtils.getYarnUrlFromPath(remoteTezJarPath), LocalResourceType.FILE,
                LocalResourceVisibility.APPLICATION, tezJarFileStatus.getLen(),
                tezJarFileStatus.getModificationTime());
        commonLocalResources.put("pig-tez.jar", tezJarLocalRsrc);

        //log.info("jar file "+submitJarFile.getName()+" created");
        //Start setting the JobConf properties
        conf.set("mapred.jar", submitJarFile.getPath());
    }

    /*
    LocalResource dagJarLocalRsrc = LocalResource.newInstance(
    ConverterUtils.getYarnUrlFromPath(remoteJarPath),
    LocalResourceType.FILE,
    LocalResourceVisibility.APPLICATION,
    jarFileStatus.getLen(),
    jarFileStatus.getModificationTime());
    commonLocalResources.put("dag_job.jar", dagJarLocalRsrc);
    */

    Hashtable<TezOperator, Pair<Vertex, Configuration>> vertexMap = new Hashtable<TezOperator, Pair<Vertex, Configuration>>();

    List<TezOperator> operators = tezPlan.getRoots();

    // add settings for pig statistics
    String setScriptProp = conf.get(ScriptState.INSERT_ENABLED, "true");
    ScriptState ss = null;

    if (setScriptProp.equalsIgnoreCase("true")) {
        ss = ScriptState.get();
    }

    while (operators != null && operators.size() != 0) {

        List<TezOperator> successors = new ArrayList<TezOperator>();

        for (TezOperator oper : operators) {

            Configuration operConf = oper.configure(pigContext, conf);
            /*
            if (ss != null){
               ss.addSettingsToConf(oper, conf);
            }
            */
            List<TezOperator> predecessors = plan.getPredecessors(oper);

            if (predecessors != null && predecessors.size() != 0) {
                MultiStageMRConfToTezTranslator.translateVertexConfToTez(operConf,
                        vertexMap.get(predecessors.get(0)).second);
            } else {
                MultiStageMRConfToTezTranslator.translateVertexConfToTez(operConf, null);
            }

            List<TezOperator> operSuccessors = tezPlan.getSuccessors(oper);
            if (operSuccessors != null) {
                successors.addAll(operSuccessors);
            }

            MRHelpers.doJobClientMagic(operConf);

            //mapStageConf.setInt(MRJobConfig.NUM_MAPS, numMapper);

            Vertex operVertex = new Vertex(oper.name(),
                    new ProcessorDescriptor(oper.getProcessor(), MRHelpers.createUserPayloadFromConf(operConf)),
                    oper.getParallelism(), MRHelpers.getMapResource(operConf));

            oper.configureVertex(operVertex, operConf, commonLocalResources, remoteStagingDir);

            dag.addVertex(operVertex);
            if (predecessors != null) {

                for (TezOperator predecessor : predecessors) {
                    dag.addEdge(new Edge(vertexMap.get(predecessor).first, operVertex,
                            tezPlan.getEdgeProperty(predecessor, oper)));
                }

            }

            vertexMap.put(oper, new Pair<Vertex, Configuration>(operVertex, operConf));
        }

        operators = successors;
    }
    return dag;
}

From source file:io.amient.yarn1.YarnContainerContext.java

License:Open Source License

private void prepareLocalResourceFile(Map<String, LocalResource> localResources, String fileName,
        String remoteFileName, FileSystem distFs) throws IOException {
    final Path dst = new Path(distFs.getHomeDirectory(), remoteFileName);
    FileStatus scFileStatus = distFs.getFileStatus(dst);
    final URL yarnUrl = ConverterUtils.getYarnUrlFromURI(dst.toUri());
    LocalResource scRsrc = LocalResource.newInstance(yarnUrl, LocalResourceType.FILE,
            LocalResourceVisibility.APPLICATION, scFileStatus.getLen(), scFileStatus.getModificationTime());
    localResources.put(fileName, scRsrc);
}

From source file:io.dstream.tez.utils.HadoopUtils.java

License:Apache License

/**
 * Creates a single {@link LocalResource} for the provisioned resource identified with {@link Path}
 *
 * @param fs
 * @param provisionedResourcePath
 * @return
 */
public static LocalResource createLocalResource(FileSystem fs, Path provisionedResourcePath) {
    try {
        FileStatus scFileStatus = fs.getFileStatus(provisionedResourcePath);
        LocalResource localResource = LocalResource.newInstance(
                ConverterUtils.getYarnUrlFromURI(provisionedResourcePath.toUri()), LocalResourceType.FILE,
                LocalResourceVisibility.APPLICATION, scFileStatus.getLen(), scFileStatus.getModificationTime());
        return localResource;
    } catch (Exception e) {
        throw new IllegalStateException(
                "Failed to communicate with FileSystem while creating LocalResource: " + fs, e);
    }
}
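
As a follow-up, here is a hedged sketch of how a helper like the createLocalResource method above might be wired into a container launch. The staging path, the "job.jar" link name, and the launch command are placeholders; ContainerLaunchContext.newInstance is the standard YARN factory method.

import java.util.Collections;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.LocalResource;

import io.dstream.tez.utils.HadoopUtils;

public class LaunchContextSketch {
    public static ContainerLaunchContext buildContext(Configuration conf) throws Exception {
        FileSystem fs = FileSystem.get(conf);
        Path provisioned = new Path(fs.getHomeDirectory(), "staging/job.jar"); // placeholder path

        // reuse the helper from the example above to build the LocalResource
        LocalResource jar = HadoopUtils.createLocalResource(fs, provisioned);
        Map<String, LocalResource> localResources = Collections.singletonMap("job.jar", jar);

        // "job.jar" is localized into the container's working directory before the command runs
        return ContainerLaunchContext.newInstance(localResources,
                Collections.<String, String>emptyMap(),
                Collections.singletonList("java -cp job.jar Main"),
                null, null, null);
    }
}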

From source file:io.hops.tensorflow.ApplicationMaster.java

License:Apache License

/**
 * Parse command line options
 *
 * @param args
 *     Command line args
 * @return Whether init successful and run should be invoked
 * @throws ParseException
 * @throws IOException
 */
public boolean init(String[] args) throws ParseException, IOException {
    Options opts = createOptions();
    cliParser = new GnuParser().parse(opts, args);

    containerPython = cliParser.getOptionValue(PYTHON, null);

    if (args.length == 0) {
        printUsage(opts);
        throw new IllegalArgumentException("No args specified for application master to initialize");
    }

    //Check whether customer log4j.properties file exists
    if (fileExist(LOG4J_PATH)) {
        try {
            Log4jPropertyHelper.updateLog4jConfiguration(ApplicationMaster.class, LOG4J_PATH);
        } catch (Exception e) {
            LOG.warn("Can not set up custom log4j properties. " + e);
        }
    }

    if (cliParser.hasOption(HELP)) {
        printUsage(opts);
        return false;
    }

    if (cliParser.hasOption(DEBUG)) {
        dumpOutDebugInfo();
    }

    if (!cliParser.hasOption(MAIN_RELATIVE)) {
        throw new IllegalArgumentException("No main application file specified");
    }
    mainRelative = cliParser.getOptionValue(MAIN_RELATIVE);

    if (cliParser.hasOption(ARGS)) {
        arguments = cliParser.getOptionValues(ARGS);
    }

    Map<String, String> envs = System.getenv();

    if (!envs.containsKey(Environment.CONTAINER_ID.name())) {
        if (cliParser.hasOption(APP_ATTEMPT_ID)) {
            String appIdStr = cliParser.getOptionValue(APP_ATTEMPT_ID, "");
            appAttemptID = ConverterUtils.toApplicationAttemptId(appIdStr);
        } else {
            throw new IllegalArgumentException("Application Attempt Id not set in the environment");
        }
    } else {
        ContainerId containerId = ConverterUtils.toContainerId(envs.get(Environment.CONTAINER_ID.name()));
        appAttemptID = containerId.getApplicationAttemptId();
    }

    if (!envs.containsKey(ApplicationConstants.APP_SUBMIT_TIME_ENV)) {
        throw new RuntimeException(ApplicationConstants.APP_SUBMIT_TIME_ENV + " not set in the environment");
    }
    if (!envs.containsKey(Environment.NM_HOST.name())) {
        throw new RuntimeException(Environment.NM_HOST.name() + " not set in the environment");
    }
    if (!envs.containsKey(Environment.NM_HTTP_PORT.name())) {
        throw new RuntimeException(Environment.NM_HTTP_PORT + " not set in the environment");
    }
    if (!envs.containsKey(Environment.NM_PORT.name())) {
        throw new RuntimeException(Environment.NM_PORT.name() + " not set in the environment");
    }

    LOG.info("Application master for app" + ", appId=" + appAttemptID.getApplicationId().getId()
            + ", clustertimestamp=" + appAttemptID.getApplicationId().getClusterTimestamp() + ", attemptId="
            + appAttemptID.getAttemptId());

    if (cliParser.hasOption(ENV)) {
        String shellEnvs[] = cliParser.getOptionValues(ENV);
        for (String env : shellEnvs) {
            env = env.trim();
            int index = env.indexOf('=');
            if (index == -1) {
                environment.put(env, "");
                continue;
            }
            String key = env.substring(0, index);
            String val = "";
            if (index < (env.length() - 1)) {
                val = env.substring(index + 1);
            }
            environment.put(key, val);
        }
    }

    if (cliParser.hasOption(TENSORBOARD)) {
        environment.put("YARNTF_TENSORBOARD", "true");
    }

    if (envs.containsKey(Constants.YARNTFTIMELINEDOMAIN)) {
        domainId = envs.get(Constants.YARNTFTIMELINEDOMAIN);
    }

    containerMemory = Integer.parseInt(cliParser.getOptionValue(MEMORY, "1024"));
    containerVirtualCores = Integer.parseInt(cliParser.getOptionValue(VCORES, "1"));
    containerGPUs = Integer.parseInt(cliParser.getOptionValue(GPUS, "0"));
    tfProtocol = cliParser.getOptionValue(PROTOCOL, null);

    numWorkers = Integer.parseInt(cliParser.getOptionValue(WORKERS, "1"));
    numPses = Integer.parseInt(cliParser.getOptionValue(PSES, "1"));
    numTotalContainers = numWorkers + numPses;
    if (!(numWorkers > 0 && numPses > 0 || numWorkers == 1 && numPses == 0)) {
        throw new IllegalArgumentException("Invalid no. of workers or parameter server");
    }
    // requestPriority = Integer.parseInt(cliParser.getOptionValue(PRIORITY, "0"));

    allocationTimeout = Long.parseLong(cliParser.getOptionValue(ALLOCATION_TIMEOUT, "15")) * 1000;

    environment.put("YARNTF_MEMORY", Integer.toString(containerMemory));
    environment.put("YARNTF_VCORES", Integer.toString(containerVirtualCores));
    environment.put("YARNTF_GPUS", Integer.toString(containerGPUs));
    if (tfProtocol != null) {
        environment.put("YARNTF_PROTOCOL", tfProtocol);
    }
    environment.put("YARNTF_WORKERS", Integer.toString(numWorkers));
    environment.put("YARNTF_PSES", Integer.toString(numPses));
    environment.put("YARNTF_HOME_DIR", FileSystem.get(conf).getHomeDirectory().toString());
    environment.put("PYTHONUNBUFFERED", "true");

    DistributedCacheList distCacheList = null;
    FileInputStream fin = null;
    ObjectInputStream ois = null;
    try {
        fin = new FileInputStream(Constants.DIST_CACHE_PATH);
        ois = new ObjectInputStream(fin);
        try {
            distCacheList = (DistributedCacheList) ois.readObject();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
    } finally {
        org.apache.commons.io.IOUtils.closeQuietly(ois);
        org.apache.commons.io.IOUtils.closeQuietly(fin);
    }
    LOG.info("Loaded distribute cache list: " + distCacheList.toString());
    for (int i = 0; i < distCacheList.size(); i++) {
        DistributedCacheList.Entry entry = distCacheList.get(i);
        LocalResource distRsrc = LocalResource.newInstance(ConverterUtils.getYarnUrlFromURI(entry.uri),
                LocalResourceType.FILE, LocalResourceVisibility.APPLICATION, entry.size, entry.timestamp);
        localResources.put(entry.relativePath, distRsrc);
    }

    return true;
}