Example usage for org.apache.hadoop.fs CommonConfigurationKeysPublic HADOOP_SECURITY_AUTHENTICATION

List of usage examples for org.apache.hadoop.fs CommonConfigurationKeysPublic HADOOP_SECURITY_AUTHENTICATION

Introduction

On this page you can find example usages of org.apache.hadoop.fs CommonConfigurationKeysPublic HADOOP_SECURITY_AUTHENTICATION.

Prototype

public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";

The constant names the configuration key that selects the cluster's authentication method; the usual values are "simple" and "kerberos".
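
For orientation before the project listings, here is a minimal, self-contained sketch (not taken from any project; the class name is hypothetical) that reads the key with an explicit default and branches on the configured method:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

public class AuthModeCheck {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Hadoop treats an unset key as "simple" authentication.
        String auth = conf.get(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "simple");
        if ("kerberos".equalsIgnoreCase(auth)) {
            System.out.println("Cluster expects Kerberos authentication");
        } else {
            System.out.println("Cluster uses simple authentication");
        }
    }
}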

Usage

From source file:org.apache.slider.common.tools.SliderUtils.java

License:Apache License

/**
 * Turn on security. This is set up to run only once.
 * @param conf configuration to build up security
 * @return true if security was initialized in this call
 * @throws IOException IO/Net problems
 * @throws BadConfigException the configuration and system state are inconsistent
 */
public static boolean initProcessSecurity(Configuration conf) throws IOException, BadConfigException {

    if (!processSecurityAlreadyInitialized.compareAndSet(false, true)) {
        //security has already been initialized by an earlier call
        return false;
    }

    log.info("JVM initialized into secure mode with kerberos realm {}", SliderUtils.getKerberosRealm());
    //this gets UGI to reset its previous world view (i.e., simple-auth security)
    log.debug("java.security.krb5.realm={}", System.getProperty(JAVA_SECURITY_KRB5_REALM, ""));
    log.debug("java.security.krb5.kdc={}", System.getProperty(JAVA_SECURITY_KRB5_KDC, ""));
    log.debug("hadoop.security.authentication={}",
            conf.get(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION));
    log.debug("hadoop.security.authorization={}",
            conf.get(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION));
    /*    SecurityUtil.setAuthenticationMethod(
            UserGroupInformation.AuthenticationMethod.KERBEROS, conf);*/
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation authUser = UserGroupInformation.getCurrentUser();
    log.debug("Authenticating as " + authUser.toString());
    log.debug("Login user is {}", UserGroupInformation.getLoginUser());
    if (!UserGroupInformation.isSecurityEnabled()) {
        throw new BadConfigException("Although secure mode is enabled,"
                + "the application has already set up its user as an insecure entity %s", authUser);
    }
    if (authUser.getAuthenticationMethod() == UserGroupInformation.AuthenticationMethod.SIMPLE) {
        throw new BadConfigException("Auth User is not Kerberized %s"
                + " -security has already been set up with the wrong authentication method. "
                + "This can occur if a file system has already been created prior to the loading of "
                + "the security configuration.", authUser);

    }

    SliderUtils.verifyPrincipalSet(conf, YarnConfiguration.RM_PRINCIPAL);
    SliderUtils.verifyPrincipalSet(conf, DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY);
    return true;
}
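
The run-once guard at the top of initProcessSecurity relies on AtomicBoolean.compareAndSet to make initialization race-free. A standalone sketch of the same pattern, with hypothetical names:

import java.util.concurrent.atomic.AtomicBoolean;

public class OnceOnlyInit {
    private static final AtomicBoolean initialized = new AtomicBoolean(false);

    // Returns true only for the single caller that flips the flag from false to true.
    public static boolean initOnce() {
        if (!initialized.compareAndSet(false, true)) {
            return false; // another caller already initialized
        }
        // ... one-time setup goes here ...
        return true;
    }
}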

From source file:org.apache.streams.hdfs.WebHdfsPersistReader.java

License:Apache License

private synchronized void connectToWebHDFS() {
    try {
        LOGGER.info("User : {}", this.hdfsConfiguration.getUser());
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser(this.hdfsConfiguration.getUser());
        ugi.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE);

        ugi.doAs(new PrivilegedExceptionAction<Void>() {
            public Void run() throws Exception {
                Configuration conf = new Configuration();
                conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
                LOGGER.info("WebURI : {}", getURI().toString());
                client = FileSystem.get(getURI(), conf);
                LOGGER.info("Connected to WebHDFS");

                /*
                * ************************************************************************************************
                * This code is an example of how you would work with HDFS if you weren't going over
                * the webHDFS protocol.
                *
                * Smashew: 2013-10-01
                * ************************************************************************************************
                conf.set("fs.defaultFS", "hdfs://hadoop.mdigitallife.com:8020/user/" + userName);
                conf.set("namenode.host","0.0.0.0");
                conf.set("hadoop.job.ugi", userName);
                conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "runner");
                fileSystem.createNewFile(new Path("/user/"+ userName + "/test"));
                FileStatus[] status = fs.listStatus(new Path("/user/" + userName));
                for(int i=0;i<status.length;i++)
                {
                LOGGER.info("Directory: {}", status[i].getPath());
                }
                */
                return null;
            }
        });
    } catch (Exception e) {
        LOGGER.error("There was an error connecting to WebHDFS, please check your settings and try again");
        e.printStackTrace();
    }
}
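
The identity plumbing here (createRemoteUser plus doAs) is independent of WebHDFS. Below is a minimal, hedged sketch of running filesystem calls as a named remote user; the user name and path are placeholders, and it keeps the UGI method and the configuration key consistent (the listing above forces the UGI to SIMPLE while writing "kerberos" into the per-call Configuration):

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class DoAsSketch {
    public static void main(String[] args) throws Exception {
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser("someUser");
        ugi.doAs(new PrivilegedExceptionAction<Void>() {
            public Void run() throws Exception {
                Configuration conf = new Configuration();
                // Keep the key consistent with the UGI's SIMPLE method.
                conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "simple");
                FileSystem fs = FileSystem.get(conf);
                System.out.println(fs.exists(new Path("/tmp")));
                return null;
            }
        });
    }
}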

From source file:org.apache.streams.hdfs.WebHdfsPersistWriter.java

License:Apache License

private synchronized void connectToWebHDFS() {
    try {
        LOGGER.info("User : {}", this.hdfsConfiguration.getUser());
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser(this.hdfsConfiguration.getUser());
        ugi.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE);

        ugi.doAs(new PrivilegedExceptionAction<Void>() {
            public Void run() throws Exception {
                Configuration conf = new Configuration();
                conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
                LOGGER.info("WebURI : {}", getURI().toString());
                client = FileSystem.get(getURI(), conf);
                LOGGER.info("Connected to WebHDFS");

                /*
                * ************************************************************************************************
                * This code is an example of how you would work with HDFS if you weren't going over
                * the webHDFS protocol.
                *
                * Smashew: 2013-10-01
                * ************************************************************************************************
                conf.set("fs.defaultFS", "hdfs://hadoop.mdigitallife.com:8020/user/" + userName);
                conf.set("namenode.host","0.0.0.0");
                conf.set("hadoop.job.ugi", userName);
                conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "runner");
                fileSystem.createNewFile(new Path("/user/"+ userName + "/test"));
                FileStatus[] status = fs.listStatus(new Path("/user/" + userName));
                for(int i=0;i<status.length;i++)
                {
                LOGGER.info("Directory: {}", status[i].getPath());
                }
                */
                return null;
            }
        });
    } catch (Exception e) {
        LOGGER.error("There was an error connecting to WebHDFS, please check your settings and try again", e);
        throw new RuntimeException(e);
    }
}

From source file:org.apache.tez.auxservices.TestShuffleHandler.java

License:Apache License

/**
 * Validate the ownership of the map-output files being pulled in. The
 * local-file-system owner of the file should match the user component in the
 * shuffle request.
 *
 * @throws IOException I/O problems
 */
@Test(timeout = 100000)
public void testMapFileAccess() throws IOException {
    // This will run only if NativeIO is enabled, as SecureIOUtils needs it
    assumeTrue(NativeIO.isAvailable());
    Configuration conf = new Configuration();
    conf.setInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, 0);
    conf.setInt(ShuffleHandler.MAX_SHUFFLE_CONNECTIONS, 3);
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    File absLogDir = new File("target", TestShuffleHandler.class.getSimpleName() + "LocDir").getAbsoluteFile();
    conf.set(YarnConfiguration.NM_LOCAL_DIRS, absLogDir.getAbsolutePath());
    ApplicationId appId = ApplicationId.newInstance(12345, 1);
    LOG.info(appId.toString());
    String appAttemptId = "attempt_12345_1_m_1_0";
    String user = "randomUser";
    String reducerId = "0";
    List<File> fileMap = new ArrayList<File>();
    createShuffleHandlerFiles(absLogDir, user, appId.toString(), appAttemptId, conf, fileMap);
    ShuffleHandler shuffleHandler = new ShuffleHandler() {

        @Override
        protected Shuffle getShuffle(Configuration conf) {
            // replace the shuffle handler with one stubbed for testing
            return new Shuffle(conf) {

                @Override
                protected void verifyRequest(String appid, ChannelHandlerContext ctx, HttpRequest request,
                        HttpResponse response, URL requestUri) throws IOException {
                    // Do nothing.
                }

            };
        }
    };
    shuffleHandler.init(conf);
    try {
        shuffleHandler.start();
        DataOutputBuffer outputBuffer = new DataOutputBuffer();
        outputBuffer.reset();
        Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>("identifier".getBytes(),
                "password".getBytes(), new Text(user), new Text("shuffleService"));
        jt.write(outputBuffer);
        shuffleHandler.initializeApplication(new ApplicationInitializationContext(user, appId,
                ByteBuffer.wrap(outputBuffer.getData(), 0, outputBuffer.getLength())));
        URL url = new URL("http://127.0.0.1:"
                + shuffleHandler.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY)
                + "/mapOutput?job=job_12345_0001&dag=1&reduce=" + reducerId + "&map=attempt_12345_1_m_1_0");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME, ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
        conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION, ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
        conn.connect();
        byte[] byteArr = new byte[10000];
        try {
            DataInputStream is = new DataInputStream(conn.getInputStream());
            is.readFully(byteArr);
        } catch (EOFException e) {
            // ignore
        }
        // Retrieve file owner name
        FileInputStream is = new FileInputStream(fileMap.get(0));
        String owner = NativeIO.POSIX.getFstat(is.getFD()).getOwner();
        is.close();

        String message = "Owner '" + owner + "' for path " + fileMap.get(0).getAbsolutePath()
                + " did not match expected owner '" + user + "'";
        Assert.assertTrue((new String(byteArr)).contains(message));
    } finally {
        shuffleHandler.stop();
        FileUtil.fullyDelete(absLogDir);
    }
}
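
In the test above, UserGroupInformation.setConfiguration(conf) is what makes the "kerberos" value take effect for the whole process. A minimal standalone sketch (not from the test) of how the key drives UserGroupInformation.isSecurityEnabled():

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.security.UserGroupInformation;

public class SecurityToggleSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
        UserGroupInformation.setConfiguration(conf);
        // true: any authentication method other than SIMPLE counts as secure
        System.out.println(UserGroupInformation.isSecurityEnabled());

        conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "simple");
        UserGroupInformation.setConfiguration(conf);
        // false: back to simple authentication
        System.out.println(UserGroupInformation.isSecurityEnabled());
    }
}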

From source file:org.apache.tez.auxservices.TestShuffleHandler.java

License:Apache License

@Test(timeout = 100000)
public void testGetMapOutputInfo() throws Exception {
    final ArrayList<Throwable> failures = new ArrayList<Throwable>(1);
    Configuration conf = new Configuration();
    conf.setInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, 0);
    conf.setInt(ShuffleHandler.MAX_SHUFFLE_CONNECTIONS, 3);
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "simple");
    UserGroupInformation.setConfiguration(conf);
    File absLogDir = new File("target", TestShuffleHandler.class.getSimpleName() + "LocDir").getAbsoluteFile();
    conf.set(YarnConfiguration.NM_LOCAL_DIRS, absLogDir.getAbsolutePath());
    ApplicationId appId = ApplicationId.newInstance(12345, 1);
    String appAttemptId = "attempt_12345_1_m_1_0";
    String user = "randomUser";
    String reducerId = "0";
    List<File> fileMap = new ArrayList<File>();
    createShuffleHandlerFiles(absLogDir, user, appId.toString(), appAttemptId, conf, fileMap);
    ShuffleHandler shuffleHandler = new ShuffleHandler() {
        @Override
        protected Shuffle getShuffle(Configuration conf) {
            // replace the shuffle handler with one stubbed for testing
            return new Shuffle(conf) {
                @Override
                protected void populateHeaders(List<String> mapIds, String outputBaseStr, String dagId,
                        String user, Range reduceRange, HttpResponse response, boolean keepAliveParam,
                        Map<String, MapOutputInfo> infoMap) throws IOException {
                    // Only set response headers and skip everything else
                    // send some dummy value for content-length
                    super.setResponseHeaders(response, keepAliveParam, 100);
                }

                @Override
                protected void verifyRequest(String appid, ChannelHandlerContext ctx, HttpRequest request,
                        HttpResponse response, URL requestUri) throws IOException {
                    // Do nothing.
                }

                @Override
                protected void sendError(ChannelHandlerContext ctx, String message, HttpResponseStatus status) {
                    if (failures.size() == 0) {
                        failures.add(new Error(message));
                        ctx.getChannel().close();
                    }
                }

                @Override
                protected ChannelFuture sendMapOutput(ChannelHandlerContext ctx, Channel ch, String user,
                        String mapId, Range reduceRange, MapOutputInfo info) throws IOException {
                    // send a shuffle header
                    ShuffleHeader header = new ShuffleHeader("attempt_12345_1_m_1_0", 5678, 5678, 1);
                    DataOutputBuffer dob = new DataOutputBuffer();
                    header.write(dob);
                    return ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
                }
            };
        }
    };
    shuffleHandler.init(conf);
    try {
        shuffleHandler.start();
        DataOutputBuffer outputBuffer = new DataOutputBuffer();
        outputBuffer.reset();
        Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>("identifier".getBytes(),
                "password".getBytes(), new Text(user), new Text("shuffleService"));
        jt.write(outputBuffer);
        shuffleHandler.initializeApplication(new ApplicationInitializationContext(user, appId,
                ByteBuffer.wrap(outputBuffer.getData(), 0, outputBuffer.getLength())));
        URL url = new URL("http://127.0.0.1:"
                + shuffleHandler.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY)
                + "/mapOutput?job=job_12345_0001&dag=1&reduce=" + reducerId + "&map=attempt_12345_1_m_1_0");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME, ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
        conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION, ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
        conn.connect();
        try {
            DataInputStream is = new DataInputStream(conn.getInputStream());
            ShuffleHeader header = new ShuffleHeader();
            header.readFields(is);
            is.close();
        } catch (EOFException e) {
            // ignore
        }
        Assert.assertEquals("sendError called due to shuffle error", 0, failures.size());
    } finally {
        shuffleHandler.stop();
        FileUtil.fullyDelete(absLogDir);
    }
}
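
The job-token handshake that these tests repeat is plain Writable serialization. A sketch of just that step, assuming the Tez JobTokenIdentifier class imported by TestShuffleHandler; the identifier and password bytes are dummies, exactly as in the tests:

import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.Token;
import org.apache.tez.common.security.JobTokenIdentifier;

public class TokenBufferSketch {
    static ByteBuffer serializeToken(String user) throws IOException {
        Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>("identifier".getBytes(),
                "password".getBytes(), new Text(user), new Text("shuffleService"));
        DataOutputBuffer dob = new DataOutputBuffer();
        jt.write(dob); // Token implements Writable
        return ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    }
}

The resulting ByteBuffer is what the tests hand to initializeApplication via ApplicationInitializationContext.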

From source file:org.apache.tez.auxservices.TestShuffleHandler.java

License:Apache License

@Test(timeout = 5000)
public void testDagDelete() throws Exception {
    final ArrayList<Throwable> failures = new ArrayList<Throwable>(1);
    Configuration conf = new Configuration();
    conf.setInt(ShuffleHandler.MAX_SHUFFLE_CONNECTIONS, 3);
    conf.setInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, 0);
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "simple");
    UserGroupInformation.setConfiguration(conf);
    File absLogDir = new File("target", TestShuffleHandler.class.getSimpleName() + "LocDir").getAbsoluteFile();
    conf.set(YarnConfiguration.NM_LOCAL_DIRS, absLogDir.getAbsolutePath());
    ApplicationId appId = ApplicationId.newInstance(12345, 1);
    String appAttemptId = "attempt_12345_1_m_1_0";
    String user = "randomUser";
    List<File> fileMap = new ArrayList<File>();
    createShuffleHandlerFiles(absLogDir, user, appId.toString(), appAttemptId, conf, fileMap);
    ShuffleHandler shuffleHandler = new ShuffleHandler() {
        @Override
        protected Shuffle getShuffle(Configuration conf) {
            // replace the shuffle handler with one stubbed for testing
            return new Shuffle(conf) {
                @Override
                protected void sendError(ChannelHandlerContext ctx, String message, HttpResponseStatus status) {
                    if (failures.size() == 0) {
                        failures.add(new Error(message));
                        ctx.getChannel().close();
                    }
                }
            };
        }
    };
    shuffleHandler.init(conf);
    try {
        shuffleHandler.start();
        DataOutputBuffer outputBuffer = new DataOutputBuffer();
        outputBuffer.reset();
        Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>("identifier".getBytes(),
                "password".getBytes(), new Text(user), new Text("shuffleService"));
        jt.write(outputBuffer);
        shuffleHandler.initializeApplication(new ApplicationInitializationContext(user, appId,
                ByteBuffer.wrap(outputBuffer.getData(), 0, outputBuffer.getLength())));
        URL url = new URL(
                "http://127.0.0.1:" + shuffleHandler.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY)
                        + "/mapOutput?dagAction=delete&job=job_12345_0001&dag=1");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME, ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
        conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION, ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
        String dagDirStr = StringUtils.join(Path.SEPARATOR, new String[] { absLogDir.getAbsolutePath(),
                ShuffleHandler.USERCACHE, user, ShuffleHandler.APPCACHE, appId.toString(), "dag_1/" });
        File dagDir = new File(dagDirStr);
        Assert.assertTrue("Dag Directory does not exist!", dagDir.exists());
        conn.connect();
        try {
            DataInputStream is = new DataInputStream(conn.getInputStream());
            is.close();
            Assert.assertFalse("Dag Directory was not deleted!", dagDir.exists());
        } catch (EOFException e) {
            // ignore
        }
        Assert.assertEquals("sendError called due to shuffle error", 0, failures.size());
    } finally {
        shuffleHandler.stop();
        FileUtil.fullyDelete(absLogDir);
    }
}