Example usage for org.apache.hadoop.yarn.api.records.URL.getScheme

List of usage examples for org.apache.hadoop.yarn.api.records.URL.getScheme

Introduction

On this page you can find example usage for org.apache.hadoop.yarn.api.records.URL.getScheme().

Prototype

@Public
@Stable
public abstract String getScheme();

Document

Get the scheme of the URL.
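
As a quick illustration of the prototype above, the following minimal sketch builds a YARN URL with URL.newInstance (the scheme, host, port, and path are placeholder values) and reads the scheme back:

import org.apache.hadoop.yarn.api.records.URL;

public class UrlSchemeExample {
    public static void main(String[] args) {
        // Placeholder values for illustration only.
        URL url = URL.newInstance("hdfs", "namenode.example.com", 8020, "/apps/app.jar");

        // getScheme() returns the scheme component of the URL, here "hdfs".
        System.out.println(url.getScheme());
    }
}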

Usage

From source file: org.apache.reef.bridge.client.JobResourceUploader.java

License: Apache License

/**
 * This class is invoked from Org.Apache.REEF.Client.Yarn.LegacyJobResourceUploader in .NET code.
 * Arguments:
 * [0] : Local path for already generated archive
 * [1] : Path of job submission directory
 * [2] : File path for output with details of uploaded resource
 */
public static void main(final String[] args) throws InjectionException, IOException {
    Validate.isTrue(args.length == 3, "Job resource uploader requires 3 args");
    final File localFile = new File(args[0]);
    Validate.isTrue(localFile.exists(), "Local archive does not exist " + localFile.getAbsolutePath());
    final String jobSubmissionDirectory = args[1];
    final String localOutputPath = args[2];

    LOG.log(Level.INFO, "Received args: LocalPath " + localFile.getAbsolutePath() + " Submission directory "
            + jobSubmissionDirectory + " LocalOutputPath " + localOutputPath);
    final Configuration configuration = Configurations.merge(
            Tang.Factory.getTang().newConfigurationBuilder()
                    .bindNamedParameter(JobSubmissionDirectoryPrefix.class, jobSubmissionDirectory).build(),
            YarnClientConfiguration.CONF.build());

    final JobUploader jobUploader = Tang.Factory.getTang().newInjector(configuration)
            .getInstance(JobUploader.class);
    final LocalResource localResource = jobUploader.createJobFolder(jobSubmissionDirectory)
            .uploadAsLocalResource(localFile);

    // Output: <UploadedPath>;<LastModificationUnixTimestamp>;<ResourceSize>
    final URL resource = localResource.getResource();
    final String outputString = String.format("%s://%s:%d%s;%d;%d", resource.getScheme(), resource.getHost(),
            resource.getPort(), resource.getFile(), localResource.getTimestamp(), localResource.getSize());
    LOG.log(Level.INFO, "Writing output: " + outputString);
    try (Writer writer = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(localOutputPath), "utf-8"))) {
        writer.write(outputString);
    }

    LOG.log(Level.FINER, "Done writing output file");
}
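
The output file written above contains a single line of the form <UploadedPath>;<LastModificationUnixTimestamp>;<ResourceSize>. As a hypothetical consumer (not part of the REEF source shown here, error handling omitted), that line could be split back into its three fields like this:

// Hypothetical reader for the single output line written by JobResourceUploader.
String line = java.nio.file.Files.readAllLines(java.nio.file.Paths.get(localOutputPath)).get(0);
String[] parts = line.split(";");
String uploadedPath = parts[0];                  // e.g. <scheme>://<host>:<port><file>
long lastModified = Long.parseLong(parts[1]);    // last modification Unix timestamp
long resourceSize = Long.parseLong(parts[2]);    // resource size in bytes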

From source file: org.apache.tez.common.TezConverterUtils.java

License: Apache License

/**
 * return a {@link URI} from a given url
 *
 * @param url
 *          url to convert
 * @return path from {@link URL}
 * @throws URISyntaxException
 */
@Private
public static URI getURIFromYarnURL(URL url) throws URISyntaxException {
    String scheme = url.getScheme() == null ? "" : url.getScheme();

    String authority = "";
    if (url.getHost() != null) {
        authority = url.getHost();
        if (url.getUserInfo() != null) {
            authority = url.getUserInfo() + "@" + authority;
        }
        if (url.getPort() > 0) {
            authority += ":" + url.getPort();
        }
    }

    return new URI(scheme, authority, url.getFile(), null, null).normalize();
}
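
A rough usage sketch for this helper (the host, port, and path below are made-up values, and the call site must handle URISyntaxException):

// Convert a YARN URL back into a java.net.URI.
URL yarnUrl = URL.newInstance("hdfs", "namenode.example.com", 8020, "/tmp/input.txt");
URI uri = TezConverterUtils.getURIFromYarnURL(yarnUrl);
// uri.toString() yields "hdfs://namenode.example.com:8020/tmp/input.txt"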

From source file: org.apache.tez.dag.api.client.rpc.TestDAGClientAMProtocolBlockingPBServerImpl.java

License: Apache License

@Test(timeout = 5000)
@SuppressWarnings("unchecked")
public void testSubmitDagInSessionWithLargeDagPlan() throws Exception {
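    // Note: tmpFolder (a JUnit TemporaryFolder rule) and localResourcesCaptor (a Mockito
    // ArgumentCaptor for Map<String, LocalResource>) are fields declared on the test class
    // and are not shown in this snippet.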
    int maxIPCMsgSize = 1024;
    String dagPlanName = "dagplan-name";
    File requestFile = tmpFolder.newFile("request-file");
    TezConfiguration conf = new TezConfiguration();
    conf.setInt(CommonConfigurationKeys.IPC_MAXIMUM_DATA_LENGTH, maxIPCMsgSize);

    byte[] randomBytes = new byte[2 * maxIPCMsgSize];
    (new Random()).nextBytes(randomBytes);
    UserPayload payload = UserPayload.create(ByteBuffer.wrap(randomBytes));
    Vertex vertex = Vertex.create("V", ProcessorDescriptor.create("P").setUserPayload(payload), 1);
    DAGPlan dagPlan = DAG.create(dagPlanName).addVertex(vertex).createDag(conf, null, null, null, false);

    String lrName = "localResource";
    String scheme = "file";
    String host = "localhost";
    int port = 80;
    String path = "/test";
    URL lrURL = URL.newInstance(scheme, host, port, path);
    LocalResource localResource = LocalResource.newInstance(lrURL, LocalResourceType.FILE,
            LocalResourceVisibility.PUBLIC, 1, 1);
    Map<String, LocalResource> localResources = new HashMap<>();
    localResources.put(lrName, localResource);

    SubmitDAGRequestProto.Builder requestBuilder = SubmitDAGRequestProto.newBuilder().setDAGPlan(dagPlan)
            .setAdditionalAmResources(DagTypeConverters.convertFromLocalResources(localResources));
    try (FileOutputStream fileOutputStream = new FileOutputStream(requestFile)) {
        requestBuilder.build().writeTo(fileOutputStream);
    }

    DAGClientHandler dagClientHandler = mock(DAGClientHandler.class);
    ACLManager aclManager = mock(ACLManager.class);
    DAGClientAMProtocolBlockingPBServerImpl serverImpl = spy(
            new DAGClientAMProtocolBlockingPBServerImpl(dagClientHandler, FileSystem.get(conf)));
    when(dagClientHandler.getACLManager()).thenReturn(aclManager);
    when(dagClientHandler.submitDAG((DAGPlan) any(), (Map<String, LocalResource>) any())).thenReturn("dag-id");
    when(aclManager.checkAMModifyAccess((UserGroupInformation) any())).thenReturn(true);

    requestBuilder.clear().setSerializedRequestPath(requestFile.getAbsolutePath());
    serverImpl.submitDAG(null, requestBuilder.build());

    ArgumentCaptor<DAGPlan> dagPlanCaptor = ArgumentCaptor.forClass(DAGPlan.class);
    verify(dagClientHandler).submitDAG(dagPlanCaptor.capture(), localResourcesCaptor.capture());
    dagPlan = dagPlanCaptor.getValue();
    localResources = localResourcesCaptor.getValue();

    assertEquals(dagPlan.getName(), dagPlanName);
    assertEquals(dagPlan.getVertexCount(), 1);
    assertTrue(dagPlan.getSerializedSize() > maxIPCMsgSize);
    assertArrayEquals(randomBytes,
            dagPlan.getVertex(0).getProcessorDescriptor().getTezUserPayload().getUserPayload().toByteArray());
    assertEquals(localResources.size(), 1);
    assertTrue(localResources.containsKey(lrName));
    localResource = localResources.get(lrName);
    assertEquals(localResource.getType(), LocalResourceType.FILE);
    assertEquals(localResource.getVisibility(), LocalResourceVisibility.PUBLIC);
    lrURL = localResource.getResource();
    assertEquals(lrURL.getScheme(), scheme);
    assertEquals(lrURL.getHost(), host);
    assertEquals(lrURL.getPort(), port);
    assertEquals(lrURL.getFile(), path);
}

From source file: org.apache.tez.dag.api.DagTypeConverters.java

License: Apache License

public static String convertToDAGPlan(URL resource) {
    // see above notes on HDFS URL handling
    return resource.getScheme() + "://" + resource.getHost() + ":" + resource.getPort() + resource.getFile();
}
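
A small sketch of the string this produces (illustrative values; the round trip back to a YARN URL via DagTypeConverters.convertToYarnURL is exercised in the test below):

// Flattens scheme, host, port and file into a single plan string.
URL url = URL.newInstance("hdfs", "mycluster", 2311, "/file");
String serialized = DagTypeConverters.convertToDAGPlan(url);
// serialized is "hdfs://mycluster:2311/file"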

From source file: org.apache.tez.dag.api.TestDagTypeConverters.java

License: Apache License

@Test(timeout = 5000)
public void testYarnPathTranslation() {
    // Without port
    String p1String = "hdfs://mycluster/file";
    Path p1Path = new Path(p1String);
    // Users would translate this via this mechanic.
    URL lr1Url = ConverterUtils.getYarnUrlFromPath(p1Path);
    // Serialize to dag plan.
    String p1StringSerialized = DagTypeConverters.convertToDAGPlan(lr1Url);
    // Deserialize
    URL lr1UrlDeserialized = DagTypeConverters.convertToYarnURL(p1StringSerialized);
    Assert.assertEquals("mycluster", lr1UrlDeserialized.getHost());
    Assert.assertEquals("/file", lr1UrlDeserialized.getFile());
    Assert.assertEquals("hdfs", lr1UrlDeserialized.getScheme());

    // With port
    String p2String = "hdfs://mycluster:2311/file";
    Path p2Path = new Path(p2String);
    // Users would translate this via this mechanic.
    URL lr2Url = ConverterUtils.getYarnUrlFromPath(p2Path);
    // Serialize to dag plan.
    String p2StringSerialized = DagTypeConverters.convertToDAGPlan(lr2Url);
    // Deserialize
    URL lr2UrlDeserialized = DagTypeConverters.convertToYarnURL(p2StringSerialized);
    Assert.assertEquals("mycluster", lr2UrlDeserialized.getHost());
    Assert.assertEquals("/file", lr2UrlDeserialized.getFile());
    Assert.assertEquals("hdfs", lr2UrlDeserialized.getScheme());
    Assert.assertEquals(2311, lr2UrlDeserialized.getPort());
}