Example usage for org.apache.hadoop.yarn.api.records LocalResource getResource

List of usage examples for org.apache.hadoop.yarn.api.records LocalResource getResource

Introduction

In this page you can find the example usage for org.apache.hadoop.yarn.api.records LocalResource getResource.

Prototype

@Public
@Stable
public abstract URL getResource();

Source Link

Document

Get the location of the resource to be localized.

Usage

From source file:com.continuuity.weave.yarn.YarnWeavePreparer.java

License:Open Source License

/**
 * Serializes the subset of {@code localResources} selected by {@code keys} into a
 * "localFiles.json" file at a temp location, then registers that JSON file itself
 * as an additional local resource under the key "localFiles.json".
 *
 * @param localResources name-to-resource map; mutated by adding the generated
 *                       "localFiles.json" entry
 * @param keys           names of the resources to record in the JSON file
 * @throws IOException if the temp location cannot be created or written
 */
private void saveLocalFiles(Map<String, LocalResource> localResources, Set<String> keys) throws IOException {
    // Lazy transformed view: each selected LocalResource becomes a LocalFile descriptor.
    Map<String, LocalFile> localFiles = Maps.transformEntries(
            Maps.filterKeys(localResources, Predicates.in(keys)),
            new Maps.EntryTransformer<String, LocalResource, LocalFile>() {
                @Override
                public LocalFile transformEntry(String key, LocalResource value) {
                    try {
                        return new DefaultLocalFile(key,
                                ConverterUtils.getPathFromYarnURL(value.getResource()).toUri(),
                                value.getTimestamp(), value.getSize(),
                                // any non-FILE type (ARCHIVE/PATTERN) is flagged as an archive
                                value.getType() != LocalResourceType.FILE, value.getPattern());
                    } catch (URISyntaxException e) {
                        // EntryTransformer cannot declare checked exceptions; rethrow unchecked.
                        throw Throwables.propagate(e);
                    }
                }
            });

    LOG.debug("Create and copy localFiles.json");
    Location location = createTempLocation("localFiles", ".json");
    Writer writer = new OutputStreamWriter(location.getOutputStream(), Charsets.UTF_8);
    try {
        // Serialize the LocalFile values as a JSON list using the custom codec.
        new GsonBuilder().registerTypeAdapter(LocalFile.class, new LocalFileCodec()).create()
                .toJson(localFiles.values(), new TypeToken<List<LocalFile>>() {
                }.getType(), writer);
    } finally {
        writer.close();
    }
    LOG.debug("Done localFiles.json");
    localResources.put("localFiles.json", YarnUtils.createLocalResource(location));
}

From source file:com.scaleoutsoftware.soss.hserver.hadoop.DistributedCacheManager.java

License:Apache License

/**
 * Set up the distributed cache by localizing the resources, and updating
 * the configuration with references to the localized resources.
 * <p>
 * Only worker 0 performs the download; other workers block on the
 * synchronization barrier until worker 0 signals completion.
 *
 * @param conf job configuration; updated in place with the localized
 *             archive/file paths
 * @throws IOException if localization of any resource fails
 */
public void setup(Configuration conf) throws IOException {
    //If we are not 0th worker, wait for 0th worker to set up the cache
    if (InvocationWorker.getIgWorkerIndex() > 0 && InvocationWorker.getNumberOfWorkers() > 1) {
        try {
            InvocationWorker.getSynchronizationBarrier().waitForComplete(ACTION_NAME, SYNCHRONIZATION_WAIT_MS,
                    WAIT_GRANULARITY_MS);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        return;
    }

    File workDir = new File(System.getProperty("user.dir"));

    // Generate YARN local resources objects corresponding to the distributed
    // cache configuration
    Map<String, LocalResource> localResources = new LinkedHashMap<String, LocalResource>();
    MRApps.setupDistributedCache(conf, localResources);

    //CODE CHANGE FROM ORIGINAL FILE:
    //We need to clear the resources from jar files, since they are distributed through the IG.
    //
    Iterator<Map.Entry<String, LocalResource>> iterator = localResources.entrySet().iterator();
    while (iterator.hasNext()) {
        Entry<String, LocalResource> entry = iterator.next();
        if (entry.getKey().endsWith(".jar")) {
            iterator.remove();
        }
    }

    // Generating unique numbers for FSDownload.

    AtomicLong uniqueNumberGenerator = new AtomicLong(System.currentTimeMillis());

    // Find which resources are to be put on the local classpath
    Map<String, Path> classpaths = new HashMap<String, Path>();
    Path[] archiveClassPaths = DistributedCache.getArchiveClassPaths(conf);
    if (archiveClassPaths != null) {
        for (Path p : archiveClassPaths) {
            FileSystem remoteFS = p.getFileSystem(conf);
            p = remoteFS.resolvePath(p.makeQualified(remoteFS.getUri(), remoteFS.getWorkingDirectory()));
            classpaths.put(p.toUri().getPath().toString(), p);
        }
    }

    Path[] fileClassPaths = DistributedCache.getFileClassPaths(conf);
    if (fileClassPaths != null) {
        for (Path p : fileClassPaths) {
            FileSystem remoteFS = p.getFileSystem(conf);
            p = remoteFS.resolvePath(p.makeQualified(remoteFS.getUri(), remoteFS.getWorkingDirectory()));
            classpaths.put(p.toUri().getPath().toString(), p);
        }
    }

    // Localize the resources
    LocalDirAllocator localDirAllocator = new LocalDirAllocator(MRConfig.LOCAL_DIR);
    FileContext localFSFileContext = FileContext.getLocalFSFileContext();
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();

    ExecutorService exec = null;
    try {
        ThreadFactory tf = new ThreadFactoryBuilder()
                .setNameFormat("LocalDistributedCacheManager Downloader #%d").build();
        exec = Executors.newCachedThreadPool(tf);
        Path destPath = localDirAllocator.getLocalPathForWrite(".", conf);
        // Kick off all downloads in parallel, then collect the results below.
        Map<LocalResource, Future<Path>> resourcesToPaths = Maps.newHashMap();
        for (LocalResource resource : localResources.values()) {
            Callable<Path> download = new FSDownload(localFSFileContext, ugi, conf,
                    new Path(destPath, Long.toString(uniqueNumberGenerator.incrementAndGet())), resource);
            Future<Path> future = exec.submit(download);
            resourcesToPaths.put(resource, future);
        }
        for (Entry<String, LocalResource> entry : localResources.entrySet()) {
            LocalResource resource = entry.getValue();
            Path path;
            try {
                path = resourcesToPaths.get(resource).get();
            } catch (InterruptedException e) {
                throw new IOException(e);
            } catch (ExecutionException e) {
                throw new IOException(e);
            }
            String pathString = path.toUri().toString();
            String link = entry.getKey();
            String target = new File(path.toUri()).getPath();
            // Expose the localized file in the working directory under its link name.
            symlink(workDir, target, link);

            if (resource.getType() == LocalResourceType.ARCHIVE) {
                localArchives.add(pathString);
            } else if (resource.getType() == LocalResourceType.FILE) {
                localFiles.add(pathString);
            } else if (resource.getType() == LocalResourceType.PATTERN) {
                //PATTERN is not currently used in local mode
                throw new IllegalArgumentException(
                        "Resource type PATTERN is not " + "implemented yet. " + resource.getResource());
            }
            Path resourcePath;
            try {
                resourcePath = ConverterUtils.getPathFromYarnURL(resource.getResource());
            } catch (URISyntaxException e) {
                throw new IOException(e);
            }
            LOG.info(String.format("Localized %s as %s", resourcePath, path));
            String cp = resourcePath.toUri().getPath();
            if (classpaths.keySet().contains(cp)) {
                localClasspaths.add(path.toUri().getPath().toString());
            }
        }
    } finally {
        if (exec != null) {
            exec.shutdown();
        }
    }
    // Update the configuration object with localized data.
    if (!localArchives.isEmpty()) {
        conf.set(MRJobConfig.CACHE_LOCALARCHIVES,
                StringUtils.arrayToString(localArchives.toArray(new String[localArchives.size()])));
    }
    if (!localFiles.isEmpty()) {
        // BUGFIX: the array must be sized by localFiles, not localArchives;
        // sizing by the wrong list left null padding when archives outnumbered files.
        conf.set(MRJobConfig.CACHE_LOCALFILES,
                StringUtils.arrayToString(localFiles.toArray(new String[localFiles.size()])));
    }
    setupCalled = true;

    //If we are  0th worker, signal action complete
    if (InvocationWorker.getIgWorkerIndex() == 0 && InvocationWorker.getNumberOfWorkers() > 1) {
        try {
            InvocationWorker.getSynchronizationBarrier().signalComplete(ACTION_NAME);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

}

From source file:org.apache.drill.yarn.core.LaunchSpec.java

License:Apache License

/**
 * Writes a human-readable summary of this launch spec to the given stream:
 * command or main class (with VM args and class path), program args,
 * environment variables, and the localized YARN resources.
 *
 * @param out destination stream; not closed by this method
 */
public void dump(PrintStream out) {
    if (command != null) {
        out.print("Command: ");
        out.println(command);
    }
    if (mainClass != null) {
        out.print("Main Class: ");
        out.println(mainClass);
        out.println("VM Args:");
        if (vmArgs.isEmpty()) {
            out.println("  None");
        } else {
            for (String item : vmArgs) {
                out.print("  ");
                out.println(item);
            }
        }
        out.println("Class Path:");
        if (classPath.isEmpty()) {
            out.println("  None");
        } else {
            for (String item : classPath) {
                out.print("  ");
                out.println(item);
            }
        }
    }
    out.println("Program Args:");
    if (cmdArgs.isEmpty()) {
        out.println("  None");
    } else {
        for (String item : cmdArgs) {
            out.print("  ");
            out.println(item);
        }
    }
    out.println("Environment:");
    if (env.isEmpty()) {
        out.println("  None");
    } else {
        for (String key : env.keySet()) {
            out.print("  ");
            out.print(key);
            out.print("=");
            out.println(env.get(key));
        }
    }
    out.println("Resources: ");
    if (resources.isEmpty()) {
        out.println("  None");
    } else {
        for (String key : resources.keySet()) {
            out.print("  Key: ");
            out.println(key);
            LocalResource resource = resources.get(key);
            out.print("   URL: ");
            out.println(resource.getResource().toString());
            out.print("   Size: ");
            out.println(resource.getSize());
            out.print("   Timestamp: ");
            out.println(DoYUtil.toIsoTime(resource.getTimestamp()));
            out.print("   Type: ");
            out.println(resource.getType().toString());
            // BUGFIX: label was misspelled "Visiblity" in the original output.
            out.print("   Visibility: ");
            out.println(resource.getVisibility().toString());
        }
    }
}

From source file:org.apache.hoya.core.launch.AbstractLauncher.java

License:Apache License

/**
 * Dump local resources at debug level/*from   w  w  w  . j  av a2 s.  c om*/
 */
private void dumpLocalResources() {
    if (log.isDebugEnabled()) {
        log.debug("{} resources: ", localResources.size());
        for (Map.Entry<String, LocalResource> entry : localResources.entrySet()) {

            String key = entry.getKey();
            LocalResource val = entry.getValue();
            log.debug(key + "=" + HoyaUtils.stringify(val.getResource()));
        }
    }
}

From source file:org.apache.reef.bridge.client.JobResourceUploader.java

License:Apache License

/**
 * This class is invoked from Org.Apache.REEF.Client.Yarn.LegacyJobResourceUploader in .NET code.
 * Uploads an already-built archive to the job submission directory and writes a
 * one-line descriptor of the uploaded resource to a local output file.
 * Arguments:
 * [0] : Local path for already generated archive
 * [1] : Path of job submission directory
 * [2] : File path for output with details of uploaded resource
 */
public static void main(final String[] args) throws InjectionException, IOException {
    Validate.isTrue(args.length == 3, "Job resource uploader requires 3 args");
    final File localFile = new File(args[0]);
    Validate.isTrue(localFile.exists(), "Local archive does not exist " + localFile.getAbsolutePath());
    final String jobSubmissionDirectory = args[1];
    final String localOutputPath = args[2];

    LOG.log(Level.INFO, "Received args: LocalPath " + localFile.getAbsolutePath() + " Submission directory "
            + jobSubmissionDirectory + " LocalOutputPath " + localOutputPath);
    // Bind the submission directory prefix and merge with the standard YARN client config.
    final Configuration configuration = Configurations.merge(
            Tang.Factory.getTang().newConfigurationBuilder()
                    .bindNamedParameter(JobSubmissionDirectoryPrefix.class, jobSubmissionDirectory).build(),
            YarnClientConfiguration.CONF.build());

    // Inject the uploader and push the archive into the job folder as a YARN LocalResource.
    final JobUploader jobUploader = Tang.Factory.getTang().newInjector(configuration)
            .getInstance(JobUploader.class);
    final LocalResource localResource = jobUploader.createJobFolder(jobSubmissionDirectory)
            .uploadAsLocalResource(localFile);

    // Output: <UploadedPath>;<LastModificationUnixTimestamp>;<ResourceSize>
    final URL resource = localResource.getResource();
    final String outputString = String.format("%s://%s:%d%s;%d;%d", resource.getScheme(), resource.getHost(),
            resource.getPort(), resource.getFile(), localResource.getTimestamp(), localResource.getSize());
    LOG.log(Level.INFO, "Writing output: " + outputString);
    try (Writer writer = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(localOutputPath), "utf-8"))) {
        writer.write(outputString);
    }

    LOG.log(Level.FINER, "Done writing output file");
}

From source file:org.apache.slider.core.launch.AbstractLauncher.java

License:Apache License

/**
 * Log the registered local resources at debug level: first the count,
 * then one "key=resource" line per entry. Does nothing when debug
 * logging is disabled.
 */
private void dumpLocalResources() {
    if (!log.isDebugEnabled()) {
        return;
    }
    log.debug("{} resources: ", localResources.size());
    for (Map.Entry<String, LocalResource> resourceEntry : localResources.entrySet()) {
        log.debug(resourceEntry.getKey() + "="
                + SliderUtils.stringify(resourceEntry.getValue().getResource()));
    }
}

From source file:org.apache.tez.client.TezClientUtils.java

License:Apache License

/**
 * Obtains tokens for the DAG based on the list of URIs setup in the DAG. The
 * fetched credentials are populated back into the DAG and can be retrieved
 * via dag.getCredentials
 *
 * @param dag
 *          the dag for which credentials need to be setup
 * @param sessionCredentials
 *          session credentials which have already been obtained, and will be
 *          required for the DAG
 * @param conf
 *          configuration used to resolve file systems for token acquisition
 * @return the merged credentials (session credentials plus tokens for all
 *         configured URIs and local resource paths)
 * @throws IOException if token acquisition from a file system fails
 */
@Private
static Credentials setupDAGCredentials(DAG dag, Credentials sessionCredentials, Configuration conf)
        throws IOException {

    Preconditions.checkNotNull(sessionCredentials);
    TezCommonUtils.logCredentials(LOG, sessionCredentials, "session");

    Credentials dagCredentials = new Credentials();
    // All session creds are required for the DAG.
    dagCredentials.mergeAll(sessionCredentials);

    // Add additional credentials based on any URIs that the user may have specified.

    // Obtain Credentials for any paths that the user may have configured.
    addFileSystemCredentialsFromURIs(dag.getURIsForCredentials(), dagCredentials, conf);

    // Obtain Credentials for the local resources configured on the DAG
    try {
        // Collect every distinct local-resource path across all vertices and the DAG itself.
        Set<Path> lrPaths = new HashSet<Path>();
        for (Vertex v : dag.getVertices()) {
            for (LocalResource lr : v.getTaskLocalFiles().values()) {
                lrPaths.add(ConverterUtils.getPathFromYarnURL(lr.getResource()));
            }
            // Data sources/sinks may carry their own credential URIs; add those too.
            List<DataSourceDescriptor> dataSources = v.getDataSources();
            for (DataSourceDescriptor dataSource : dataSources) {
                addFileSystemCredentialsFromURIs(dataSource.getURIsForCredentials(), dagCredentials, conf);
            }
            List<DataSinkDescriptor> dataSinks = v.getDataSinks();
            for (DataSinkDescriptor dataSink : dataSinks) {
                addFileSystemCredentialsFromURIs(dataSink.getURIsForCredentials(), dagCredentials, conf);
            }
        }

        for (LocalResource lr : dag.getTaskLocalFiles().values()) {
            lrPaths.add(ConverterUtils.getPathFromYarnURL(lr.getResource()));
        }

        // One bulk token fetch for all collected paths.
        Path[] paths = lrPaths.toArray(new Path[lrPaths.size()]);
        TokenCache.obtainTokensForFileSystems(dagCredentials, paths, conf);

    } catch (URISyntaxException e) {
        throw new IOException(e);
    }

    return dagCredentials;
}

From source file:org.apache.tez.common.TezCommonUtils.java

License:Apache License

/**
 * Merges {@code additionalLrs} into {@code originalLRs}, detecting duplicates.
 * A duplicate with a different size is a hard error; an equivalent duplicate
 * (same or different path) is logged as a warning. Additional resources win on
 * key collision so container-reuse matching prefers the Tez-specified entry.
 *
 * @param additionalLrs resources to merge in (may be null or empty; then no-op)
 * @param originalLRs   target map, mutated in place
 * @param logContext    label used in error/warning messages
 * @throws TezUncheckedException if a key maps to resources of differing sizes
 */
public static void addAdditionalLocalResources(Map<String, LocalResource> additionalLrs,
        Map<String, LocalResource> originalLRs, String logContext) {
    // TODO TEZ-1798. Handle contents of Tez archives for duplicate LocalResource checks
    if (additionalLrs != null && !additionalLrs.isEmpty()) {
        StringBuilder sb = new StringBuilder();
        for (Map.Entry<String, LocalResource> lrEntry : additionalLrs.entrySet()) {
            LocalResource originalLr = originalLRs.get(lrEntry.getKey());
            if (originalLr != null) {
                LocalResource additionalLr = lrEntry.getValue();
                if (originalLr.getSize() != additionalLr.getSize()) {
                    // BUGFIX: message previously dropped the closing "]" after the
                    // original resource's size, producing unbalanced brackets.
                    throw new TezUncheckedException("Duplicate Resources found with different size for ["
                            + logContext + "]: " + lrEntry.getKey() + " : " + "[" + additionalLr.getResource()
                            + "=" + additionalLr.getSize() + "],[" + originalLr.getResource() + "="
                            + originalLr.getSize() + "]");
                } else {
                    if (originalLr.getResource().equals(additionalLr.getResource())) {
                        sb.append("[").append(lrEntry.getKey()).append(" : Duplicate]");
                    } else {
                        sb.append("[").append(lrEntry.getKey()).append(" : DuplicateDifferentPath]");
                    }
                }
            }
            // The LR either does not exist, or is an 'equivalent' dupe.
            // Prefer the tez specified LR instead of the equivalent user specified LR for container reuse matching
            originalLRs.put(lrEntry.getKey(), lrEntry.getValue());
        }
        String logString = sb.toString();
        if (!logString.isEmpty()) {
            LOG.warn("Found Resources Duplication in " + logContext + " after including resources from "
                    + TezConfiguration.TEZ_LIB_URIS + " and " + TezConfiguration.TEZ_AUX_URIS + ": "
                    + logString);
        }
    }
}

From source file:org.apache.tez.common.TezConverterUtils.java

License:Apache License

/**
 * Converts a YARN {@link LocalResource} into its Tez counterpart, carrying
 * over the resource URI, size, and timestamp.
 *
 * @param lr the YARN local resource to convert
 * @return an equivalent {@code TezLocalResource}
 * @throws URISyntaxException if the resource URL cannot be parsed as a URI
 */
@Private
public static TezLocalResource convertYarnLocalResourceToTez(LocalResource lr) throws URISyntaxException {
    URI resourceUri = getURIFromYarnURL(lr.getResource());
    long resourceSize = lr.getSize();
    long resourceTimestamp = lr.getTimestamp();
    return new TezLocalResource(resourceUri, resourceSize, resourceTimestamp);
}

From source file:org.apache.tez.dag.api.client.rpc.TestDAGClientAMProtocolBlockingPBServerImpl.java

License:Apache License

// Verifies that a DAG plan larger than the IPC message size limit can be
// submitted via a serialized request file instead of the inline proto,
// and that both the plan and the additional local resources survive the
// round trip intact.
@Test(timeout = 5000)
@SuppressWarnings("unchecked")
public void testSubmitDagInSessionWithLargeDagPlan() throws Exception {
    int maxIPCMsgSize = 1024;
    String dagPlanName = "dagplan-name";
    File requestFile = tmpFolder.newFile("request-file");
    TezConfiguration conf = new TezConfiguration();
    conf.setInt(CommonConfigurationKeys.IPC_MAXIMUM_DATA_LENGTH, maxIPCMsgSize);

    // A payload of 2x the IPC limit guarantees the serialized plan exceeds it.
    byte[] randomBytes = new byte[2 * maxIPCMsgSize];
    (new Random()).nextBytes(randomBytes);
    UserPayload payload = UserPayload.create(ByteBuffer.wrap(randomBytes));
    Vertex vertex = Vertex.create("V", ProcessorDescriptor.create("P").setUserPayload(payload), 1);
    DAGPlan dagPlan = DAG.create(dagPlanName).addVertex(vertex).createDag(conf, null, null, null, false);

    // One additional AM local resource to round-trip alongside the plan.
    String lrName = "localResource";
    String scheme = "file";
    String host = "localhost";
    int port = 80;
    String path = "/test";
    URL lrURL = URL.newInstance(scheme, host, port, path);
    LocalResource localResource = LocalResource.newInstance(lrURL, LocalResourceType.FILE,
            LocalResourceVisibility.PUBLIC, 1, 1);
    Map<String, LocalResource> localResources = new HashMap<>();
    localResources.put(lrName, localResource);

    // Write the full request (plan + resources) to a file on disk.
    SubmitDAGRequestProto.Builder requestBuilder = SubmitDAGRequestProto.newBuilder().setDAGPlan(dagPlan)
            .setAdditionalAmResources(DagTypeConverters.convertFromLocalResources(localResources));
    try (FileOutputStream fileOutputStream = new FileOutputStream(requestFile)) {
        requestBuilder.build().writeTo(fileOutputStream);
    }

    DAGClientHandler dagClientHandler = mock(DAGClientHandler.class);
    ACLManager aclManager = mock(ACLManager.class);
    DAGClientAMProtocolBlockingPBServerImpl serverImpl = spy(
            new DAGClientAMProtocolBlockingPBServerImpl(dagClientHandler, FileSystem.get(conf)));
    when(dagClientHandler.getACLManager()).thenReturn(aclManager);
    when(dagClientHandler.submitDAG((DAGPlan) any(), (Map<String, LocalResource>) any())).thenReturn("dag-id");
    when(aclManager.checkAMModifyAccess((UserGroupInformation) any())).thenReturn(true);

    // Submit with only the serialized-request path set — no inline plan.
    requestBuilder.clear().setSerializedRequestPath(requestFile.getAbsolutePath());
    serverImpl.submitDAG(null, requestBuilder.build());

    // Capture what the server actually handed to the DAG client handler.
    ArgumentCaptor<DAGPlan> dagPlanCaptor = ArgumentCaptor.forClass(DAGPlan.class);
    verify(dagClientHandler).submitDAG(dagPlanCaptor.capture(), localResourcesCaptor.capture());
    dagPlan = dagPlanCaptor.getValue();
    localResources = localResourcesCaptor.getValue();

    assertEquals(dagPlan.getName(), dagPlanName);
    assertEquals(dagPlan.getVertexCount(), 1);
    assertTrue(dagPlan.getSerializedSize() > maxIPCMsgSize);
    assertArrayEquals(randomBytes,
            dagPlan.getVertex(0).getProcessorDescriptor().getTezUserPayload().getUserPayload().toByteArray());
    assertEquals(localResources.size(), 1);
    assertTrue(localResources.containsKey(lrName));
    localResource = localResources.get(lrName);
    assertEquals(localResource.getType(), LocalResourceType.FILE);
    assertEquals(localResource.getVisibility(), LocalResourceVisibility.PUBLIC);
    lrURL = localResource.getResource();
    assertEquals(lrURL.getScheme(), scheme);
    assertEquals(lrURL.getHost(), host);
    assertEquals(lrURL.getPort(), port);
    assertEquals(lrURL.getFile(), path);
}