Example usage for org.apache.hadoop.security UserGroupInformation loginUserFromKeytab

Introduction

On this page you can find example usage for org.apache.hadoop.security UserGroupInformation loginUserFromKeytab.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public static void loginUserFromKeytab(String user, String path) throws IOException 

Document

Log a user in from a keytab file.
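
A minimal, hedged sketch of the call itself (the class name, principal, and keytab path below are illustrative placeholders; a real deployment would read them from configuration):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class KeytabLoginExample {
    public static void main(String[] args) throws IOException {
        // Tell the Hadoop security layer that Kerberos is in use before logging in.
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);

        // Placeholder principal and keytab path; substitute values for your environment.
        UserGroupInformation.loginUserFromKeytab("service/host@EXAMPLE.COM",
                "/etc/security/keytabs/service.keytab");

        // The static login user now holds the Kerberos credentials from the keytab.
        System.out.println("Logged in as: " + UserGroupInformation.getLoginUser().getUserName());
    }
}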

Usage

From source file:org.apache.zeppelin.jdbc.security.JDBCSecurityImpl.java

License:Apache License

/**
 * @param properties interpreter properties containing the Kerberos principal and keytab location
 * @param authType   authentication method to configure
 */
public static void createSecureConfiguration(Properties properties, AuthenticationMethod authType) {
    switch (authType) {
    case KERBEROS:
        Configuration conf = new org.apache.hadoop.conf.Configuration();
        conf.set("hadoop.security.authentication", KERBEROS.toString());
        UserGroupInformation.setConfiguration(conf);
        try {
            // Check TGT before calling login
            // Ref: https://github.com/apache/hadoop/blob/release-3.0.1-RC1/hadoop-common-project/
            // hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java#L1232
            if (!UserGroupInformation.isSecurityEnabled()
                    || UserGroupInformation.getCurrentUser().getAuthenticationMethod() != KERBEROS
                    || !UserGroupInformation.isLoginKeytabBased()) {
                UserGroupInformation.loginUserFromKeytab(properties.getProperty("zeppelin.jdbc.principal"),
                        properties.getProperty("zeppelin.jdbc.keytab.location"));
            } else {
                LOGGER.info(
                        "The user has already logged in using Keytab and principal, " + "no action required");
            }
        } catch (IOException e) {
            LOGGER.error("Failed to get either keytab location or principal name in the " + "interpreter", e);
        }
    }
}
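
A hedged sketch of how this helper might be invoked (the class name JdbcKerberosSetup and the property values are illustrative placeholders; AuthenticationMethod is assumed to be org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod, as the comparison against KERBEROS above suggests):

import java.util.Properties;

import org.apache.hadoop.security.UserGroupInformation;
import org.apache.zeppelin.jdbc.security.JDBCSecurityImpl;

public class JdbcKerberosSetup {
    public static void main(String[] args) {
        Properties properties = new Properties();
        // Placeholder principal and keytab path; real values come from the interpreter settings.
        properties.setProperty("zeppelin.jdbc.principal", "zeppelin/host@EXAMPLE.COM");
        properties.setProperty("zeppelin.jdbc.keytab.location", "/etc/security/keytabs/zeppelin.keytab");

        JDBCSecurityImpl.createSecureConfiguration(properties,
                UserGroupInformation.AuthenticationMethod.KERBEROS);
    }
}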

From source file:org.apache.zeppelin.spark.OldSparkInterpreter.java

License:Apache License

@Override
public void open() throws InterpreterException {
    this.enableSupportedVersionCheck = java.lang.Boolean
            .parseBoolean(getProperty("zeppelin.spark.enableSupportedVersionCheck", "true"));

    // set properties and do login before creating any spark stuff for secured cluster
    if (isYarnMode()) {
        System.setProperty("SPARK_YARN_MODE", "true");
    }
    if (getProperties().containsKey("spark.yarn.keytab")
            && getProperties().containsKey("spark.yarn.principal")) {
        try {
            String keytab = getProperties().getProperty("spark.yarn.keytab");
            String principal = getProperties().getProperty("spark.yarn.principal");
            UserGroupInformation.loginUserFromKeytab(principal, keytab);
        } catch (IOException e) {
            throw new RuntimeException("Can not pass kerberos authentication", e);
        }
    }

    conf = new SparkConf();
    URL[] urls = getClassloaderUrls();

    // Very nice discussion about how scala compiler handle classpath
    // https://groups.google.com/forum/#!topic/scala-user/MlVwo2xCCI0

    /*
     * > val env = new nsc.Settings(errLogger) > env.usejavacp.value = true > val p = new
     * Interpreter(env) > p.setContextClassLoader > Alternatively you can set the class path through
     * nsc.Settings.classpath.
     *
     * >> val settings = new Settings() >> settings.usejavacp.value = true >>
     * settings.classpath.value += File.pathSeparator + >> System.getProperty("java.class.path") >>
     * val in = new Interpreter(settings) { >> override protected def parentClassLoader =
     * getClass.getClassLoader >> } >> in.setContextClassLoader()
     */
    Settings settings = new Settings();

    // process args
    String args = getProperty("args");
    if (args == null) {
        args = "";
    }

    String[] argsArray = args.split(" ");
    LinkedList<String> argList = new LinkedList<>();
    for (String arg : argsArray) {
        argList.add(arg);
    }

    DepInterpreter depInterpreter = getParentSparkInterpreter()
            .getInterpreterInTheSameSessionByClassName(DepInterpreter.class, false);
    String depInterpreterClasspath = "";
    if (depInterpreter != null) {
        SparkDependencyContext depc = depInterpreter.getDependencyContext();
        if (depc != null) {
            List<File> files = depc.getFiles();
            if (files != null) {
                for (File f : files) {
                    if (depInterpreterClasspath.length() > 0) {
                        depInterpreterClasspath += File.pathSeparator;
                    }
                    depInterpreterClasspath += f.getAbsolutePath();
                }
            }
        }
    }

    if (Utils.isScala2_10()) {
        scala.collection.immutable.List<String> list = JavaConversions.asScalaBuffer(argList).toList();

        Object sparkCommandLine = Utils.instantiateClass("org.apache.spark.repl.SparkCommandLine",
                new Class[] { scala.collection.immutable.List.class }, new Object[] { list });

        settings = (Settings) Utils.invokeMethod(sparkCommandLine, "settings");
    } else {
        String sparkReplClassDir = getProperty("spark.repl.classdir");
        if (sparkReplClassDir == null) {
            sparkReplClassDir = System.getProperty("spark.repl.classdir");
        }
        if (sparkReplClassDir == null) {
            sparkReplClassDir = System.getProperty("java.io.tmpdir");
        }

        synchronized (sharedInterpreterLock) {
            if (outputDir == null) {
                outputDir = createTempDir(sparkReplClassDir);
            }
        }
        argList.add("-Yrepl-class-based");
        argList.add("-Yrepl-outdir");
        argList.add(outputDir.getAbsolutePath());

        String classpath = "";
        if (conf.contains("spark.jars")) {
            classpath = StringUtils.join(conf.get("spark.jars").split(","), File.separator);
        }

        if (!depInterpreterClasspath.isEmpty()) {
            if (!classpath.isEmpty()) {
                classpath += File.separator;
            }
            classpath += depInterpreterClasspath;
        }

        if (!classpath.isEmpty()) {
            argList.add("-classpath");
            argList.add(classpath);
        }

        scala.collection.immutable.List<String> list = JavaConversions.asScalaBuffer(argList).toList();

        settings.processArguments(list, true);
    }

    // set classpath for scala compiler
    PathSetting pathSettings = settings.classpath();
    String classpath = "";

    List<File> paths = currentClassPath();
    for (File f : paths) {
        if (classpath.length() > 0) {
            classpath += File.pathSeparator;
        }
        classpath += f.getAbsolutePath();
    }

    if (urls != null) {
        for (URL u : urls) {
            if (classpath.length() > 0) {
                classpath += File.pathSeparator;
            }
            classpath += u.getFile();
        }
    }

    // add dependency from DepInterpreter
    if (classpath.length() > 0) {
        classpath += File.pathSeparator;
    }
    classpath += depInterpreterClasspath;

    // add dependency from local repo
    String localRepo = getProperty("zeppelin.interpreter.localRepo");
    if (localRepo != null) {
        File localRepoDir = new File(localRepo);
        if (localRepoDir.exists()) {
            File[] files = localRepoDir.listFiles();
            if (files != null) {
                for (File f : files) {
                    if (classpath.length() > 0) {
                        classpath += File.pathSeparator;
                    }
                    classpath += f.getAbsolutePath();
                }
            }
        }
    }

    pathSettings.v_$eq(classpath);
    settings.scala$tools$nsc$settings$ScalaSettings$_setter_$classpath_$eq(pathSettings);

    // set classloader for scala compiler
    settings.explicitParentLoader_$eq(new Some<>(Thread.currentThread().getContextClassLoader()));
    BooleanSetting b = (BooleanSetting) settings.usejavacp();
    b.v_$eq(true);
    settings.scala$tools$nsc$settings$StandardScalaSettings$_setter_$usejavacp_$eq(b);

    /* Required for scoped mode.
     * In scoped mode multiple scala compiler (repl) generates class in the same directory.
     * Class names is not randomly generated and look like '$line12.$read$$iw$$iw'
     * Therefore it's possible to generated class conflict(overwrite) with other repl generated
     * class.
     *
     * To prevent generated class name conflict,
     * change prefix of generated class name from each scala compiler (repl) instance.
     *
     * In Spark 2.x, REPL generated wrapper class name should compatible with the pattern
     * ^(\$line(?:\d+)\.\$read)(?:\$\$iw)+$
     *
     * As hashCode() can return a negative integer value and the minus character '-' is invalid
     * in a package name we change it to a numeric value '0' which still conforms to the regexp.
     *
     */
    System.setProperty("scala.repl.name.line", ("$line" + this.hashCode()).replace('-', '0'));

    // To prevent 'File name too long' error on some file system.
    MutableSettings.IntSetting numClassFileSetting = settings.maxClassfileName();
    numClassFileSetting.v_$eq(128);
    settings.scala$tools$nsc$settings$ScalaSettings$_setter_$maxClassfileName_$eq(numClassFileSetting);

    synchronized (sharedInterpreterLock) {
        /* create scala repl */
        if (printREPLOutput()) {
            this.interpreter = new SparkILoop((java.io.BufferedReader) null, new PrintWriter(out));
        } else {
            this.interpreter = new SparkILoop((java.io.BufferedReader) null,
                    new PrintWriter(Console.out(), false));
        }

        interpreter.settings_$eq(settings);

        interpreter.createInterpreter();

        intp = Utils.invokeMethod(interpreter, "intp");
        Utils.invokeMethod(intp, "setContextClassLoader");
        Utils.invokeMethod(intp, "initializeSynchronous");

        if (Utils.isScala2_10()) {
            if (classOutputDir == null) {
                classOutputDir = settings.outputDirs().getSingleOutput().get();
            } else {
                // change SparkIMain class output dir
                settings.outputDirs().setSingleOutput(classOutputDir);
                ClassLoader cl = (ClassLoader) Utils.invokeMethod(intp, "classLoader");
                try {
                    Field rootField = cl.getClass().getSuperclass().getDeclaredField("root");
                    rootField.setAccessible(true);
                    rootField.set(cl, classOutputDir);
                } catch (NoSuchFieldException | IllegalAccessException e) {
                    logger.error(e.getMessage(), e);
                }
            }
        }

        if (Utils.findClass("org.apache.spark.repl.SparkJLineCompletion", true) != null) {
            completer = Utils.instantiateClass("org.apache.spark.repl.SparkJLineCompletion",
                    new Class[] { Utils.findClass("org.apache.spark.repl.SparkIMain") }, new Object[] { intp });
        } else if (Utils.findClass("scala.tools.nsc.interpreter.PresentationCompilerCompleter", true) != null) {
            completer = Utils.instantiateClass("scala.tools.nsc.interpreter.PresentationCompilerCompleter",
                    new Class[] { IMain.class }, new Object[] { intp });
        } else if (Utils.findClass("scala.tools.nsc.interpreter.JLineCompletion", true) != null) {
            completer = Utils.instantiateClass("scala.tools.nsc.interpreter.JLineCompletion",
                    new Class[] { IMain.class }, new Object[] { intp });
        }

        if (Utils.isSpark2()) {
            sparkSession = getSparkSession();
        }
        sc = getSparkContext();
        if (sc.getPoolForName("fair").isEmpty()) {
            Value schedulingMode = org.apache.spark.scheduler.SchedulingMode.FAIR();
            int minimumShare = 0;
            int weight = 1;
            Pool pool = new Pool("fair", schedulingMode, minimumShare, weight);
            sc.taskScheduler().rootPool().addSchedulable(pool);
        }

        sparkVersion = SparkVersion.fromVersionString(sc.version());
        sqlc = getSQLContext();
        dep = getDependencyResolver();
        hooks = getInterpreterGroup().getInterpreterHookRegistry();
        sparkUrl = getSparkUIUrl();
        sparkShims = SparkShims.getInstance(sc.version(), getProperties());
        sparkShims.setupSparkListener(sc.master(), sparkUrl, InterpreterContext.get());
        numReferenceOfSparkContext.incrementAndGet();

        z = new SparkZeppelinContext(sc, sparkShims, hooks,
                Integer.parseInt(getProperty("zeppelin.spark.maxResult")));

        interpret("@transient val _binder = new java.util.HashMap[String, Object]()");
        Map<String, Object> binder;
        if (Utils.isScala2_10()) {
            binder = (Map<String, Object>) getValue("_binder");
        } else {
            binder = (Map<String, Object>) getLastObject();
        }
        binder.put("sc", sc);
        binder.put("sqlc", sqlc);
        binder.put("z", z);

        if (Utils.isSpark2()) {
            binder.put("spark", sparkSession);
        }

        interpret("@transient val z = "
                + "_binder.get(\"z\").asInstanceOf[org.apache.zeppelin.spark.SparkZeppelinContext]");
        interpret("@transient val sc = " + "_binder.get(\"sc\").asInstanceOf[org.apache.spark.SparkContext]");
        interpret("@transient val sqlc = "
                + "_binder.get(\"sqlc\").asInstanceOf[org.apache.spark.sql.SQLContext]");
        interpret("@transient val sqlContext = "
                + "_binder.get(\"sqlc\").asInstanceOf[org.apache.spark.sql.SQLContext]");

        if (Utils.isSpark2()) {
            interpret("@transient val spark = "
                    + "_binder.get(\"spark\").asInstanceOf[org.apache.spark.sql.SparkSession]");
        }

        interpret("import org.apache.spark.SparkContext._");

        if (importImplicit()) {
            if (Utils.isSpark2()) {
                interpret("import spark.implicits._");
                interpret("import spark.sql");
                interpret("import org.apache.spark.sql.functions._");
            } else {
                interpret("import sqlContext.implicits._");
                interpret("import sqlContext.sql");
                interpret("import org.apache.spark.sql.functions._");
            }
        }
    }

    /* Temporary disabling DisplayUtils. see https://issues.apache.org/jira/browse/ZEPPELIN-127
     *
    // Utility functions for display
    intp.interpret("import org.apache.zeppelin.spark.utils.DisplayUtils._");
            
    // Scala implicit value for spark.maxResult
    intp.interpret("import org.apache.zeppelin.spark.utils.SparkMaxResult");
    intp.interpret("implicit val sparkMaxResult = new SparkMaxResult(" +
    Integer.parseInt(getProperty("zeppelin.spark.maxResult")) + ")");
     */

    if (Utils.isScala2_10()) {
        try {
            Method loadFiles = this.interpreter.getClass()
                    .getMethod("org$apache$spark$repl$SparkILoop$$loadFiles", Settings.class);
            loadFiles.invoke(this.interpreter, settings);
        } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException
                | InvocationTargetException e) {
            throw new InterpreterException(e);
        }
    }

    // add jar from DepInterpreter
    if (depInterpreter != null) {
        SparkDependencyContext depc = depInterpreter.getDependencyContext();
        if (depc != null) {
            List<File> files = depc.getFilesDist();
            if (files != null) {
                for (File f : files) {
                    if (f.getName().toLowerCase().endsWith(".jar")) {
                        sc.addJar(f.getAbsolutePath());
                        logger.info("sc.addJar(" + f.getAbsolutePath() + ")");
                    } else {
                        sc.addFile(f.getAbsolutePath());
                        logger.info("sc.addFile(" + f.getAbsolutePath() + ")");
                    }
                }
            }
        }
    }

    // add jar from local repo
    if (localRepo != null) {
        File localRepoDir = new File(localRepo);
        if (localRepoDir.exists()) {
            File[] files = localRepoDir.listFiles();
            if (files != null) {
                for (File f : files) {
                    if (f.getName().toLowerCase().endsWith(".jar")) {
                        sc.addJar(f.getAbsolutePath());
                        logger.info("sc.addJar(" + f.getAbsolutePath() + ")");
                    } else {
                        sc.addFile(f.getAbsolutePath());
                        logger.info("sc.addFile(" + f.getAbsolutePath() + ")");
                    }
                }
            }
        }
    }

}

From source file:org.apache.zeppelin.spark.SparkInterpreter.java

License:Apache License

@Override
public void open() throws InterpreterException {
    this.enableSupportedVersionCheck = java.lang.Boolean
            .parseBoolean(getProperty("zeppelin.spark.enableSupportedVersionCheck", "true"));

    // set properties and do login before creating any spark stuff for secured cluster
    if (isYarnMode()) {
        System.setProperty("SPARK_YARN_MODE", "true");
    }
    if (getProperties().containsKey("spark.yarn.keytab")
            && getProperties().containsKey("spark.yarn.principal")) {
        try {
            String keytab = getProperties().getProperty("spark.yarn.keytab");
            String principal = getProperties().getProperty("spark.yarn.principal");
            UserGroupInformation.loginUserFromKeytab(principal, keytab);
        } catch (IOException e) {
            throw new RuntimeException("Can not pass kerberos authentication", e);
        }
    }

    conf = new SparkConf();
    URL[] urls = getClassloaderUrls();

    // Very nice discussion about how scala compiler handle classpath
    // https://groups.google.com/forum/#!topic/scala-user/MlVwo2xCCI0

    /*
     * > val env = new nsc.Settings(errLogger) > env.usejavacp.value = true > val p = new
     * Interpreter(env) > p.setContextClassLoader > Alternatively you can set the class path through
     * nsc.Settings.classpath.
     *
     * >> val settings = new Settings() >> settings.usejavacp.value = true >>
     * settings.classpath.value += File.pathSeparator + >> System.getProperty("java.class.path") >>
     * val in = new Interpreter(settings) { >> override protected def parentClassLoader =
     * getClass.getClassLoader >> } >> in.setContextClassLoader()
     */
    Settings settings = new Settings();

    // process args
    String args = getProperty("args");
    if (args == null) {
        args = "";
    }

    String[] argsArray = args.split(" ");
    LinkedList<String> argList = new LinkedList<>();
    for (String arg : argsArray) {
        argList.add(arg);
    }

    DepInterpreter depInterpreter = getDepInterpreter();
    String depInterpreterClasspath = "";
    if (depInterpreter != null) {
        SparkDependencyContext depc = depInterpreter.getDependencyContext();
        if (depc != null) {
            List<File> files = depc.getFiles();
            if (files != null) {
                for (File f : files) {
                    if (depInterpreterClasspath.length() > 0) {
                        depInterpreterClasspath += File.pathSeparator;
                    }
                    depInterpreterClasspath += f.getAbsolutePath();
                }
            }
        }
    }

    if (Utils.isScala2_10()) {
        scala.collection.immutable.List<String> list = JavaConversions.asScalaBuffer(argList).toList();

        Object sparkCommandLine = Utils.instantiateClass("org.apache.spark.repl.SparkCommandLine",
                new Class[] { scala.collection.immutable.List.class }, new Object[] { list });

        settings = (Settings) Utils.invokeMethod(sparkCommandLine, "settings");
    } else {
        String sparkReplClassDir = getProperty("spark.repl.classdir");
        if (sparkReplClassDir == null) {
            sparkReplClassDir = System.getProperty("spark.repl.classdir");
        }
        if (sparkReplClassDir == null) {
            sparkReplClassDir = System.getProperty("java.io.tmpdir");
        }

        synchronized (sharedInterpreterLock) {
            if (outputDir == null) {
                outputDir = createTempDir(sparkReplClassDir);
            }
        }
        argList.add("-Yrepl-class-based");
        argList.add("-Yrepl-outdir");
        argList.add(outputDir.getAbsolutePath());

        String classpath = "";
        if (conf.contains("spark.jars")) {
            classpath = StringUtils.join(conf.get("spark.jars").split(","), File.separator);
        }

        if (!depInterpreterClasspath.isEmpty()) {
            if (!classpath.isEmpty()) {
                classpath += File.separator;
            }
            classpath += depInterpreterClasspath;
        }

        if (!classpath.isEmpty()) {
            argList.add("-classpath");
            argList.add(classpath);
        }

        scala.collection.immutable.List<String> list = JavaConversions.asScalaBuffer(argList).toList();

        settings.processArguments(list, true);
    }

    // set classpath for scala compiler
    PathSetting pathSettings = settings.classpath();
    String classpath = "";

    List<File> paths = currentClassPath();
    for (File f : paths) {
        if (classpath.length() > 0) {
            classpath += File.pathSeparator;
        }
        classpath += f.getAbsolutePath();
    }

    if (urls != null) {
        for (URL u : urls) {
            if (classpath.length() > 0) {
                classpath += File.pathSeparator;
            }
            classpath += u.getFile();
        }
    }

    // add dependency from DepInterpreter
    if (classpath.length() > 0) {
        classpath += File.pathSeparator;
    }
    classpath += depInterpreterClasspath;

    // add dependency from local repo
    String localRepo = getProperty("zeppelin.interpreter.localRepo");
    if (localRepo != null) {
        File localRepoDir = new File(localRepo);
        if (localRepoDir.exists()) {
            File[] files = localRepoDir.listFiles();
            if (files != null) {
                for (File f : files) {
                    if (classpath.length() > 0) {
                        classpath += File.pathSeparator;
                    }
                    classpath += f.getAbsolutePath();
                }
            }
        }
    }

    pathSettings.v_$eq(classpath);
    settings.scala$tools$nsc$settings$ScalaSettings$_setter_$classpath_$eq(pathSettings);

    // set classloader for scala compiler
    settings.explicitParentLoader_$eq(new Some<>(Thread.currentThread().getContextClassLoader()));
    BooleanSetting b = (BooleanSetting) settings.usejavacp();
    b.v_$eq(true);
    settings.scala$tools$nsc$settings$StandardScalaSettings$_setter_$usejavacp_$eq(b);

    /* Required for scoped mode.
     * In scoped mode multiple scala compiler (repl) generates class in the same directory.
     * Class names is not randomly generated and look like '$line12.$read$$iw$$iw'
     * Therefore it's possible to generated class conflict(overwrite) with other repl generated
     * class.
     *
     * To prevent generated class name conflict,
     * change prefix of generated class name from each scala compiler (repl) instance.
     *
     * In Spark 2.x, REPL generated wrapper class name should compatible with the pattern
     * ^(\$line(?:\d+)\.\$read)(?:\$\$iw)+$
     *
     * As hashCode() can return a negative integer value and the minus character '-' is invalid
     * in a package name we change it to a numeric value '0' which still conforms to the regexp.
     * 
     */
    System.setProperty("scala.repl.name.line", ("$line" + this.hashCode()).replace('-', '0'));

    // To prevent 'File name too long' error on some file system.
    MutableSettings.IntSetting numClassFileSetting = settings.maxClassfileName();
    numClassFileSetting.v_$eq(128);
    settings.scala$tools$nsc$settings$ScalaSettings$_setter_$maxClassfileName_$eq(numClassFileSetting);

    synchronized (sharedInterpreterLock) {
        /* create scala repl */
        if (printREPLOutput()) {
            this.interpreter = new SparkILoop((java.io.BufferedReader) null, new PrintWriter(out));
        } else {
            this.interpreter = new SparkILoop((java.io.BufferedReader) null,
                    new PrintWriter(Console.out(), false));
        }

        interpreter.settings_$eq(settings);

        interpreter.createInterpreter();

        intp = Utils.invokeMethod(interpreter, "intp");
        Utils.invokeMethod(intp, "setContextClassLoader");
        Utils.invokeMethod(intp, "initializeSynchronous");

        if (Utils.isScala2_10()) {
            if (classOutputDir == null) {
                classOutputDir = settings.outputDirs().getSingleOutput().get();
            } else {
                // change SparkIMain class output dir
                settings.outputDirs().setSingleOutput(classOutputDir);
                ClassLoader cl = (ClassLoader) Utils.invokeMethod(intp, "classLoader");
                try {
                    Field rootField = cl.getClass().getSuperclass().getDeclaredField("root");
                    rootField.setAccessible(true);
                    rootField.set(cl, classOutputDir);
                } catch (NoSuchFieldException | IllegalAccessException e) {
                    logger.error(e.getMessage(), e);
                }
            }
        }

        if (Utils.findClass("org.apache.spark.repl.SparkJLineCompletion", true) != null) {
            completer = Utils.instantiateClass("org.apache.spark.repl.SparkJLineCompletion",
                    new Class[] { Utils.findClass("org.apache.spark.repl.SparkIMain") }, new Object[] { intp });
        } else if (Utils.findClass("scala.tools.nsc.interpreter.PresentationCompilerCompleter", true) != null) {
            completer = Utils.instantiateClass("scala.tools.nsc.interpreter.PresentationCompilerCompleter",
                    new Class[] { IMain.class }, new Object[] { intp });
        } else if (Utils.findClass("scala.tools.nsc.interpreter.JLineCompletion", true) != null) {
            completer = Utils.instantiateClass("scala.tools.nsc.interpreter.JLineCompletion",
                    new Class[] { IMain.class }, new Object[] { intp });
        }

        if (Utils.isSpark2()) {
            sparkSession = getSparkSession();
        }
        sc = getSparkContext();
        if (sc.getPoolForName("fair").isEmpty()) {
            Value schedulingMode = org.apache.spark.scheduler.SchedulingMode.FAIR();
            int minimumShare = 0;
            int weight = 1;
            Pool pool = new Pool("fair", schedulingMode, minimumShare, weight);
            sc.taskScheduler().rootPool().addSchedulable(pool);
        }

        sparkVersion = SparkVersion.fromVersionString(sc.version());

        sqlc = getSQLContext();

        dep = getDependencyResolver();

        hooks = getInterpreterGroup().getInterpreterHookRegistry();

        z = new SparkZeppelinContext(sc, sqlc, hooks,
                Integer.parseInt(getProperty("zeppelin.spark.maxResult")));

        interpret("@transient val _binder = new java.util.HashMap[String, Object]()");
        Map<String, Object> binder;
        if (Utils.isScala2_10()) {
            binder = (Map<String, Object>) getValue("_binder");
        } else {
            binder = (Map<String, Object>) getLastObject();
        }
        binder.put("sc", sc);
        binder.put("sqlc", sqlc);
        binder.put("z", z);

        if (Utils.isSpark2()) {
            binder.put("spark", sparkSession);
        }

        interpret("@transient val z = "
                + "_binder.get(\"z\").asInstanceOf[org.apache.zeppelin.spark.SparkZeppelinContext]");
        interpret("@transient val sc = " + "_binder.get(\"sc\").asInstanceOf[org.apache.spark.SparkContext]");
        interpret("@transient val sqlc = "
                + "_binder.get(\"sqlc\").asInstanceOf[org.apache.spark.sql.SQLContext]");
        interpret("@transient val sqlContext = "
                + "_binder.get(\"sqlc\").asInstanceOf[org.apache.spark.sql.SQLContext]");

        if (Utils.isSpark2()) {
            interpret("@transient val spark = "
                    + "_binder.get(\"spark\").asInstanceOf[org.apache.spark.sql.SparkSession]");
        }

        interpret("import org.apache.spark.SparkContext._");

        if (importImplicit()) {
            if (Utils.isSpark2()) {
                interpret("import spark.implicits._");
                interpret("import spark.sql");
                interpret("import org.apache.spark.sql.functions._");
            } else {
                if (sparkVersion.oldSqlContextImplicits()) {
                    interpret("import sqlContext._");
                } else {
                    interpret("import sqlContext.implicits._");
                    interpret("import sqlContext.sql");
                    interpret("import org.apache.spark.sql.functions._");
                }
            }
        }
    }

    /* Temporary disabling DisplayUtils. see https://issues.apache.org/jira/browse/ZEPPELIN-127
     *
    // Utility functions for display
    intp.interpret("import org.apache.zeppelin.spark.utils.DisplayUtils._");
            
    // Scala implicit value for spark.maxResult
    intp.interpret("import org.apache.zeppelin.spark.utils.SparkMaxResult");
    intp.interpret("implicit val sparkMaxResult = new SparkMaxResult(" +
    Integer.parseInt(getProperty("zeppelin.spark.maxResult")) + ")");
     */

    if (Utils.isScala2_10()) {
        try {
            if (sparkVersion.oldLoadFilesMethodName()) {
                Method loadFiles = this.interpreter.getClass().getMethod("loadFiles", Settings.class);
                loadFiles.invoke(this.interpreter, settings);
            } else {
                Method loadFiles = this.interpreter.getClass()
                        .getMethod("org$apache$spark$repl$SparkILoop$$loadFiles", Settings.class);
                loadFiles.invoke(this.interpreter, settings);
            }
        } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException
                | InvocationTargetException e) {
            throw new InterpreterException(e);
        }
    }

    // add jar from DepInterpreter
    if (depInterpreter != null) {
        SparkDependencyContext depc = depInterpreter.getDependencyContext();
        if (depc != null) {
            List<File> files = depc.getFilesDist();
            if (files != null) {
                for (File f : files) {
                    if (f.getName().toLowerCase().endsWith(".jar")) {
                        sc.addJar(f.getAbsolutePath());
                        logger.info("sc.addJar(" + f.getAbsolutePath() + ")");
                    } else {
                        sc.addFile(f.getAbsolutePath());
                        logger.info("sc.addFile(" + f.getAbsolutePath() + ")");
                    }
                }
            }
        }
    }

    // add jar from local repo
    if (localRepo != null) {
        File localRepoDir = new File(localRepo);
        if (localRepoDir.exists()) {
            File[] files = localRepoDir.listFiles();
            if (files != null) {
                for (File f : files) {
                    if (f.getName().toLowerCase().endsWith(".jar")) {
                        sc.addJar(f.getAbsolutePath());
                        logger.info("sc.addJar(" + f.getAbsolutePath() + ")");
                    } else {
                        sc.addFile(f.getAbsolutePath());
                        logger.info("sc.addFile(" + f.getAbsolutePath() + ")");
                    }
                }
            }
        }
    }

    numReferenceOfSparkContext.incrementAndGet();
}

From source file:org.apache.zeppelin.submarine.hadoop.HdfsClient.java

License:Apache License

public HdfsClient(Properties properties) {
    String krb5conf = properties.getProperty(SubmarineConstants.SUBMARINE_HADOOP_KRB5_CONF, "");
    if (!StringUtils.isEmpty(krb5conf)) {
        System.setProperty("java.security.krb5.conf", krb5conf);
    }

    this.hadoopConf = new Configuration();
    // disable checksum for local file system. because interpreter.json may be updated by
    // non-hadoop filesystem api
    // disable caching for file:// scheme to avoid getting LocalFS which does CRC checks
    // this.hadoopConf.setBoolean("fs.file.impl.disable.cache", true);
    this.hadoopConf.set("fs.file.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
    // UserGroupInformation.setConfiguration(hadoopConf);
    this.isSecurityEnabled = UserGroupInformation.isSecurityEnabled();

    if (isSecurityEnabled) {
        String keytab = properties.getProperty(SubmarineConstants.SUBMARINE_HADOOP_KEYTAB, "");
        String principal = properties.getProperty(SubmarineConstants.SUBMARINE_HADOOP_PRINCIPAL, "");

        ZeppelinConfiguration zConf = ZeppelinConfiguration.create();
        if (StringUtils.isEmpty(keytab)) {
            keytab = zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_KERBEROS_KEYTAB);
        }
        if (StringUtils.isEmpty(principal)) {
            principal = zConf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_SERVER_KERBEROS_PRINCIPAL);
        }
        if (StringUtils.isBlank(keytab) || StringUtils.isBlank(principal)) {
            throw new RuntimeException(
                    "keytab and principal can not be empty, keytab: " + keytab + ", principal: " + principal);
        }
        try {
            UserGroupInformation.loginUserFromKeytab(principal, keytab);
        } catch (IOException e) {
            throw new RuntimeException("Fail to login via keytab:" + keytab + ", principal:" + principal, e);
        } catch (Exception e) {
            LOGGER.error(e.getMessage(), e);
        }
    }

    try {
        this.fs = FileSystem.get(new URI("/"), this.hadoopConf);
    } catch (IOException e) {
        LOGGER.error(e.getMessage(), e);
    } catch (URISyntaxException e) {
        LOGGER.error(e.getMessage(), e);
    }
}

From source file:org.elasticsearch.repositories.hdfs.HaHdfsFailoverTestSuiteIT.java

License:Apache License

public void testHAFailoverWithRepository() throws Exception {
    RestClient client = client();
    Map<String, String> emptyParams = Collections.emptyMap();
    Header contentHeader = new BasicHeader("Content-Type", "application/json");

    String esKerberosPrincipal = System.getProperty("test.krb5.principal.es");
    String hdfsKerberosPrincipal = System.getProperty("test.krb5.principal.hdfs");
    String kerberosKeytabLocation = System.getProperty("test.krb5.keytab.hdfs");
    boolean securityEnabled = hdfsKerberosPrincipal != null;

    Configuration hdfsConfiguration = new Configuration();
    hdfsConfiguration.set("dfs.nameservices", "ha-hdfs");
    hdfsConfiguration.set("dfs.ha.namenodes.ha-hdfs", "nn1,nn2");
    hdfsConfiguration.set("dfs.namenode.rpc-address.ha-hdfs.nn1", "localhost:10001");
    hdfsConfiguration.set("dfs.namenode.rpc-address.ha-hdfs.nn2", "localhost:10002");
    hdfsConfiguration.set("dfs.client.failover.proxy.provider.ha-hdfs",
            "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");

    AccessController.doPrivileged((PrivilegedExceptionAction<Void>) () -> {
        if (securityEnabled) {
            // ensure that keytab exists
            Path kt = PathUtils.get(kerberosKeytabLocation);
            if (Files.exists(kt) == false) {
                throw new IllegalStateException("Could not locate keytab at " + kerberosKeytabLocation);
            }
            if (Files.isReadable(kt) != true) {
                throw new IllegalStateException("Could not read keytab at " + kerberosKeytabLocation);
            }
            logger.info("Keytab Length: " + Files.readAllBytes(kt).length);

            // set principal names
            hdfsConfiguration.set("dfs.namenode.kerberos.principal", hdfsKerberosPrincipal);
            hdfsConfiguration.set("dfs.datanode.kerberos.principal", hdfsKerberosPrincipal);
            hdfsConfiguration.set("dfs.data.transfer.protection", "authentication");

            SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS,
                    hdfsConfiguration);
            UserGroupInformation.setConfiguration(hdfsConfiguration);
            UserGroupInformation.loginUserFromKeytab(hdfsKerberosPrincipal, kerberosKeytabLocation);
        } else {
            SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE,
                    hdfsConfiguration);
            UserGroupInformation.setConfiguration(hdfsConfiguration);
            UserGroupInformation.getCurrentUser();
        }
        return null;
    });

    // Create repository
    {
        Response response = client.performRequest("PUT", "/_snapshot/hdfs_ha_repo_read", emptyParams,
                new NStringEntity("{" + "\"type\":\"hdfs\"," + "\"settings\":{"
                        + "\"uri\": \"hdfs://ha-hdfs/\",\n"
                        + "\"path\": \"/user/elasticsearch/existing/readonly-repository\","
                        + "\"readonly\": \"true\"," + securityCredentials(securityEnabled, esKerberosPrincipal)
                        + "\"conf.dfs.nameservices\": \"ha-hdfs\","
                        + "\"conf.dfs.ha.namenodes.ha-hdfs\": \"nn1,nn2\","
                        + "\"conf.dfs.namenode.rpc-address.ha-hdfs.nn1\": \"localhost:10001\","
                        + "\"conf.dfs.namenode.rpc-address.ha-hdfs.nn2\": \"localhost:10002\","
                        + "\"conf.dfs.client.failover.proxy.provider.ha-hdfs\": "
                        + "\"org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider\"" + "}"
                        + "}", Charset.defaultCharset()),
                contentHeader);

        Assert.assertEquals(200, response.getStatusLine().getStatusCode());
    }

    // Get repository
    {
        Response response = client.performRequest("GET", "/_snapshot/hdfs_ha_repo_read/_all", emptyParams);
        Assert.assertEquals(200, response.getStatusLine().getStatusCode());
    }

    // Failover the namenode to the second.
    failoverHDFS("nn1", "nn2", hdfsConfiguration);

    // Get repository again
    {
        Response response = client.performRequest("GET", "/_snapshot/hdfs_ha_repo_read/_all", emptyParams);
        Assert.assertEquals(200, response.getStatusLine().getStatusCode());
    }
}

From source file:org.kaaproject.kaa.server.flume.sink.hdfs.KaaHdfsSink.java

License:Apache License

/**
 * Static synchronized method for static Kerberos login. <br/>
 * Static synchronized due to a thundering herd problem when multiple Sinks
 * attempt to log in using the same principal at the same time with the
 * intention of impersonating different users (or even the same user).
 * If this is not controlled, MIT Kerberos v5 believes it is seeing a replay
 * attack and it returns:
 * <blockquote>Request is a replay (34) - PROCESS_TGS</blockquote>
 * In addition, since the underlying Hadoop APIs we are using for
 * impersonation are static, we define this method as static as well.
 *
 * @param principal Fully-qualified principal to use for authentication.
 * @param keytab    Location of keytab file containing credentials for principal.
 * @return Logged-in user
 * @throws IOException if login fails.
 */
private static synchronized UserGroupInformation kerberosLogin(KaaHdfsSink sink, String principal,
        String keytab) throws IOException {

    // if we are the 2nd user thru the lock, the login should already be
    // available statically if login was successful
    UserGroupInformation curUser = null;
    try {
        curUser = UserGroupInformation.getLoginUser();
    } catch (IOException ex) {
        // not a big deal but this shouldn't typically happen because it will
        // generally fall back to the UNIX user
        LOG.debug("Unable to get login user before Kerberos auth attempt.", ex);
    }

    // we already have logged in successfully
    if (curUser != null && curUser.getUserName().equals(principal)) {
        LOG.debug("{}: Using existing principal ({}): {}", new Object[] { sink, principal, curUser });

        // no principal found
    } else {

        LOG.info("{}: Attempting kerberos login as principal ({}) from keytab file ({})",
                new Object[] { sink, principal, keytab });

        // attempt static kerberos login
        UserGroupInformation.loginUserFromKeytab(principal, keytab);
        curUser = UserGroupInformation.getLoginUser();
    }

    return curUser;
}

From source file:org.kitesdk.spring.hbase.example.service.KerberosLoginService.java

License:Apache License

public KerberosLoginService(String applicationPrincipal, String applicationKeytab) throws IOException {

    LOG.debug("application.kerberos.principal=" + applicationPrincipal);
    LOG.debug("application.kerberos.keytab=" + applicationKeytab);

    if (UserGroupInformation.isSecurityEnabled()) {
        Preconditions.checkNotNull(applicationPrincipal,
                "Setting the application.kerberos.principal in hbase-prod.properties "
                        + "is required when security is enabled.");

        Preconditions.checkNotNull(applicationKeytab,
                "Setting the application.kerberos.keytab in hbase-prod.properties is "
                        + "required when security is enabled.");

        LOG.info("Logging in user {} using keytab {}.",
                new Object[] { applicationPrincipal, applicationKeytab });

        UserGroupInformation.loginUserFromKeytab(applicationPrincipal, applicationKeytab);
    }
}

From source file:org.mule.modules.hdfs.connection.config.Kerberos.java

License:Open Source License

private void loginUserUsingKeytab() throws ConnectionException {
    try {
        UserGroupInformation.loginUserFromKeytab(getUsername(), getKeytabPath());
    } catch (IOException e) {
        logger.error("Unable to login user using keytab", e);
        throw new ConnectionException(ConnectionExceptionCode.UNKNOWN, null,
                "Unable to login user using keytab", e);
    }
}

From source file:org.shadowmask.framework.datacenter.hive.KerberizedHiveDc.java

License:Apache License

public void loginKdc() throws ClassNotFoundException, IOException {
    System.setProperty("java.security.krb5.realm", realm);
    System.setProperty("java.security.krb5.kdc", kdc);
    Configuration conf = new Configuration();
    conf.setBoolean("hadoop.security.authorization", true);
    conf.set("hadoop.security.authentication", "kerberos");
    UserGroupInformation.setConfiguration(conf);

    Class.forName(getJdbcDriver());
    UserGroupInformation.loginUserFromKeytab(krbUser, keyTab);

}

From source file:org.trpr.dataaccess.hbase.auth.kerberos.KerberosAuthenticationProvider.java

License:Apache License

/**
 * Interface method implementation. Initializes the specified HBase configuration with Kerberos authentication properties
 * @see org.trpr.dataaccess.hbase.auth.AuthenticationProvider#authenticatePrincipal(org.apache.hadoop.conf.Configuration)
 */
public void authenticatePrincipal(Configuration configuration) throws SecurityException {
    for (Object key : this.kerberosAuthProperties.keySet()) {
        configuration.set(key.toString(), this.kerberosAuthProperties.getProperty(key.toString()));
    }
    System.setProperty(KerberosAuthenticationProvider.KERBEROS_CONFIG_SYSTEM_VARIABLE,
            this.kerberosConfigLocation);
    try {
        UserGroupInformation.setConfiguration(configuration);
        UserGroupInformation.loginUserFromKeytab(this.kerberosPrincipal, this.kerberosKeytabLocation);
        UserGroupInformation loggedInUser = UserGroupInformation.getLoginUser();
        LOGGER.info("Currently logged in Kerberos principal : " + loggedInUser);
        new TGTRenewalThread(configuration, loggedInUser);
    } catch (Exception e) {
        throw new SecurityException("Error authenticating Kerberos Principal : " + this.kerberosPrincipal
                + " .Error message : " + e.getMessage(), e);
    }
}