Example usage for org.apache.hadoop.fs FileSystem getCanonicalServiceName

List of usage examples for org.apache.hadoop.fs FileSystem getCanonicalServiceName

Introduction

On this page you can find example usages of org.apache.hadoop.fs FileSystem getCanonicalServiceName.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
@Override
public String getCanonicalServiceName() 

Document

Get a canonical service name for this FileSystem.
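
A minimal sketch of the typical pattern, assuming an already configured Configuration object conf and a secured cluster that actually issues delegation tokens (the "yarn" renewer below is only a placeholder):

FileSystem fs = FileSystem.get(conf);

// For HDFS this is typically the NameNode's "host:port"; file systems that do
// not issue delegation tokens (e.g. the local file system) may return null.
String service = fs.getCanonicalServiceName();

// The canonical service name is the alias under which a delegation token for
// this file system is stored in a Credentials object.
Token<?> fsToken = fs.getDelegationToken("yarn");
if (service != null && fsToken != null) {
    Credentials credentials = new Credentials();
    credentials.addToken(new Text(service), fsToken);
}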

Usage

From source file:azkaban.security.HadoopSecurityManager_H_1_0.java

License:Apache License

@Override
public synchronized void prefetchToken(final File tokenFile, final String userToProxy, final Logger logger)
        throws HadoopSecurityManagerException {

    logger.info("Getting hadoop tokens for " + userToProxy);

    try {
        getProxiedUser(userToProxy).doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
                getToken(userToProxy);
                return null;
            }

            private void getToken(String userToProxy)
                    throws InterruptedException, IOException, HadoopSecurityManagerException {

                FileSystem fs = FileSystem.get(conf);
                // check if we get the correct FS, and most importantly, the conf
                logger.info("Getting DFS token from " + fs.getCanonicalServiceName() + fs.getUri());
                Token<?> fsToken = fs.getDelegationToken(userToProxy);
                if (fsToken == null) {
                    logger.error("Failed to fetch DFS token for ");
                    throw new HadoopSecurityManagerException("Failed to fetch DFS token for " + userToProxy);
                }
                logger.info("Created DFS token: " + fsToken.toString());
                logger.info("Token kind: " + fsToken.getKind());
                logger.info("Token id: " + fsToken.getIdentifier());
                logger.info("Token service: " + fsToken.getService());

                JobConf jc = new JobConf(conf);
                JobClient jobClient = new JobClient(jc);
                logger.info("Pre-fetching JT token: Got new JobClient: " + jc);

                Token<DelegationTokenIdentifier> mrdt = jobClient.getDelegationToken(new Text("mr token"));
                if (mrdt == null) {
                    logger.error("Failed to fetch JT token for ");
                    throw new HadoopSecurityManagerException("Failed to fetch JT token for " + userToProxy);
                }
                logger.info("Created JT token: " + mrdt.toString());
                logger.info("Token kind: " + mrdt.getKind());
                logger.info("Token id: " + mrdt.getIdentifier());
                logger.info("Token service: " + mrdt.getService());

                jc.getCredentials().addToken(mrdt.getService(), mrdt);
                jc.getCredentials().addToken(fsToken.getService(), fsToken);

                FileOutputStream fos = null;
                DataOutputStream dos = null;
                try {
                    fos = new FileOutputStream(tokenFile);
                    dos = new DataOutputStream(fos);
                    jc.getCredentials().writeTokenStorageToStream(dos);
                } finally {
                    if (dos != null) {
                        dos.close();
                    }
                    if (fos != null) {
                        fos.close();
                    }
                }
                // stash them to cancel after use.
                logger.info("Tokens loaded in " + tokenFile.getAbsolutePath());
            }
        });
    } catch (Exception e) {
        logger.error("Failed to get hadoop tokens!", e);
        throw new HadoopSecurityManagerException(
                "Failed to get hadoop tokens! " + e.getMessage() + e.getCause());

    }
}

From source file:azkaban.security.HadoopSecurityManager_H_2_0.java

License:Apache License

@Override
public synchronized void prefetchToken(final File tokenFile, final String userToProxy, final Logger logger)
        throws HadoopSecurityManagerException {

    logger.info("Getting hadoop tokens for " + userToProxy);

    try {
        getProxiedUser(userToProxy).doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
                getToken(userToProxy);
                return null;
            }

            private void getToken(String userToProxy)
                    throws InterruptedException, IOException, HadoopSecurityManagerException {

                FileSystem fs = FileSystem.get(conf);
                // check if we get the correct FS, and most importantly, the conf
                logger.info("Getting DFS token from " + fs.getCanonicalServiceName() + fs.getUri());
                Token<?> fsToken = fs.getDelegationToken(userToProxy);
                if (fsToken == null) {
                    logger.error("Failed to fetch DFS token for ");
                    throw new HadoopSecurityManagerException("Failed to fetch DFS token for " + userToProxy);
                }
                logger.info("Created DFS token: " + fsToken.toString());
                logger.info("Token kind: " + fsToken.getKind());
                logger.info("Token id: " + fsToken.getIdentifier());
                logger.info("Token service: " + fsToken.getService());

                JobConf jc = new JobConf(conf);
                JobClient jobClient = new JobClient(jc);
                logger.info("Pre-fetching JT token: Got new JobClient: " + jc);

                Token<DelegationTokenIdentifier> mrdt = jobClient.getDelegationToken(new Text("mr token"));
                if (mrdt == null) {
                    logger.error("Failed to fetch JT token for ");
                    throw new HadoopSecurityManagerException("Failed to fetch JT token for " + userToProxy);
                }
                logger.info("Created JT token: " + mrdt.toString());
                logger.info("Token kind: " + mrdt.getKind());
                logger.info("Token id: " + mrdt.getIdentifier());
                logger.info("Token service: " + mrdt.getService());

                jc.getCredentials().addToken(mrdt.getService(), mrdt);
                jc.getCredentials().addToken(fsToken.getService(), fsToken);

                FileOutputStream fos = null;
                DataOutputStream dos = null;
                try {
                    fos = new FileOutputStream(tokenFile);
                    dos = new DataOutputStream(fos);
                    jc.getCredentials().writeTokenStorageToStream(dos);
                } finally {
                    if (dos != null) {
                        try {
                            dos.close();
                        } catch (Throwable t) {
                            // best effort
                            logger.error(
                                    "encountered exception while closing DataOutputStream of the tokenFile", t);
                        }
                    }
                    if (fos != null) {
                        fos.close();
                    }
                }
                // stash them to cancel after use.
                logger.info("Tokens loaded in " + tokenFile.getAbsolutePath());
            }
        });
    } catch (Exception e) {
        throw new HadoopSecurityManagerException(
                "Failed to get hadoop tokens! " + e.getMessage() + e.getCause());

    }
}

From source file:com.asakusafw.runtime.directio.hadoop.HadoopDataSourceProfileTest.java

License:Apache License

/**
 * convert with relative path.
 * @throws Exception if failed
 */
@Test
public void convert_relpath() throws Exception {
    Map<String, String> attributes = new HashMap<>();
    attributes.put(KEY_PATH, "relative");
    DirectDataSourceProfile profile = new DirectDataSourceProfile("testing", HadoopDataSource.class, "context",
            attributes);
    Configuration conf = new Configuration();
    HadoopDataSourceProfile result = HadoopDataSourceProfile.convert(profile, conf);

    FileSystem defaultFs = FileSystem.get(conf);
    Path path = defaultFs.makeQualified(new Path(defaultFs.getWorkingDirectory(), "relative"));
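    // comparing canonical service names verifies that the converted profile
    // resolved to the same (default) file system as FileSystem.get(conf)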
    assertThat(result.getFileSystem().getCanonicalServiceName(), is(defaultFs.getCanonicalServiceName()));
    assertThat(result.getFileSystemPath(), is(path));
}

From source file:com.google.cloud.bigtable.hbase.TestImport.java

License:Open Source License

@Test
@Category(KnownGap.class)
public void testMapReduce() throws IOException, ClassNotFoundException, InterruptedException {
    Admin admin = getConnection().getAdmin();

    admin.disableTable(TABLE_NAME);
    admin.deleteTable(TABLE_NAME);
    IntegrationTests.createTable(TABLE_NAME);
    // Put a value.
    byte[] rowKey = dataHelper.randomData("testrow-");
    byte[] qual = dataHelper.randomData("testQualifier-");
    byte[] value = dataHelper.randomData("testValue-");

    try (Table oldTable = getConnection().getTable(TABLE_NAME)) {
        Put put = new Put(rowKey);
        put.addColumn(COLUMN_FAMILY, qual, value);
        oldTable.put(put);

        // Assert the value is there.
        Get get = new Get(rowKey);
        Result result = oldTable.get(get);
        List<Cell> cells = result.listCells();
        Assert.assertEquals(1, cells.size());
        Assert.assertArrayEquals(CellUtil.cloneValue(cells.get(0)), value);
    }

    // Run the export.
    Configuration conf = getConnection().getConfiguration();

    //conf.set("fs.defaultFS", "file:///");
    FileSystem dfs = IntegrationTests.getMiniCluster().getFileSystem();
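    // for the mini-cluster's HDFS, getCanonicalServiceName() returns the
    // NameNode's host:port, used here to build a fully qualified hdfs:// path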
    String tempDir = "hdfs://" + dfs.getCanonicalServiceName() + "/tmp/backup";

    String[] args = new String[] { TABLE_NAME.getNameAsString(), tempDir };
    Job job = Export.createSubmittableJob(conf, args);
    // So it looks for jars in the local FS, not HDFS.
    job.getConfiguration().set("fs.defaultFS", "file:///");
    Assert.assertTrue(job.waitForCompletion(true));

    // Create new table.
    TableName newTableName = IntegrationTests.newTestTableName();
    try (Table newTable = getConnection().getTable(newTableName)) {
        // Change for method in IntegrationTests
        HColumnDescriptor hcd = new HColumnDescriptor(IntegrationTests.COLUMN_FAMILY);
        HTableDescriptor htd = new HTableDescriptor(newTableName);
        htd.addFamily(hcd);
        admin.createTable(htd);

        // Run the import.
        args = new String[] { newTableName.getNameAsString(), tempDir };
        job = Import.createSubmittableJob(conf, args);
        job.getConfiguration().set("fs.defaultFS", "file:///");
        Assert.assertTrue(job.waitForCompletion(true));

        // Assert the value is there.
        Get get = new Get(rowKey);
        Result result = newTable.get(get);
        List<Cell> cells = result.listCells();
        Assert.assertEquals(1, cells.size());
        Assert.assertArrayEquals(CellUtil.cloneValue(cells.get(0)), value);
    } finally {
        admin.disableTable(newTableName);
        admin.deleteTable(newTableName);
    }
}