Example usage for org.apache.hadoop.security UserGroupInformation doAs

List of usage examples for org.apache.hadoop.security UserGroupInformation doAs

Introduction

On this page you can find example usage for org.apache.hadoop.security UserGroupInformation.doAs.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public <T> T doAs(PrivilegedExceptionAction<T> action) throws IOException, InterruptedException 

Document

Run the given action as the user, potentially throwing an exception.
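
A minimal, self-contained sketch of the typical call pattern follows. The user name "someUser" and the HDFS path are illustrative placeholders, not values taken from the examples below; impersonation also assumes proxy-user privileges are configured on the cluster.

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class DoAsExample {
    public static void main(String[] args) throws Exception {
        // Impersonate "someUser" on top of the already logged-in user
        // ("someUser" is a placeholder; proxy-user rights must be granted).
        UserGroupInformation ugi = UserGroupInformation.createProxyUser("someUser",
                UserGroupInformation.getLoginUser());

        // doAs runs the action with the proxy user's credentials and returns its
        // result; it may throw IOException or InterruptedException.
        FileStatus[] listing = ugi.doAs(new PrivilegedExceptionAction<FileStatus[]>() {
            @Override
            public FileStatus[] run() throws Exception {
                FileSystem fs = FileSystem.get(new Configuration());
                return fs.listStatus(new Path("/tmp"));
            }
        });

        for (FileStatus status : listing) {
            System.out.println(status.getPath());
        }
    }
}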

Usage

From source file:org.kitesdk.spring.hbase.example.service.WebPageSnapshotService.java

License:Apache License

/**
 * Get the WebPageSnapshotModels for a URL from HBase that have been
 * fetched since the given timestamp.
 *
 * @param url The URL of the page to fetch
 * @param since The timestamp to fetch models since
 * @param user The user to fetch as
 * @return The list of models that have been fetched for the URL since the
 * given timestamp.
 */
private List<WebPageSnapshotModel> getWebPageSnapshotsSince(String url, final long since, final String user)
        throws IOException {
    List<WebPageSnapshotModel> snapshots = null;
    final String normalizedUrl = normalizeUrl(url, user);

    UserGroupInformation ugi = UserGroupInformation.createProxyUser(user, UserGroupInformation.getLoginUser());

    snapshots = ugi.doAs(new PrivilegedAction<List<WebPageSnapshotModel>>() {

        @Override
        public List<WebPageSnapshotModel> run() {
            List<WebPageSnapshotModel> models = new ArrayList<WebPageSnapshotModel>();
            DatasetReader<WebPageSnapshotModel> reader = null;
            try {
                reader = webPageSnapshotModels(user).from("url", normalizedUrl).from("fetchedAtRevTs", 0L)
                        .to("url", normalizedUrl).to("fetchedAtRevTs", since).newReader();
                while (reader.hasNext()) {
                    models.add(reader.next());
                }
            } finally {
                if (reader != null) {
                    reader.close();
                }
            }
            return models;
        }
    });

    return snapshots;
}
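Note that results computed inside run() are only visible to the caller through the return value of doAs, which is why the list built in run() is assigned to snapshots rather than discarded.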

From source file:org.kitesdk.spring.hbase.example.service.WebPageSnapshotService.java

License:Apache License

/**
 * Return a WebPageRedirectModel if a URL is one that redirects to a
 * different source. Otherwise, returns null.
 *
 * @return The WebPageRedirectModel, or null if the URL does not redirect
 */
private WebPageRedirectModel getRedirect(final String url, final String user) throws IOException {
    UserGroupInformation ugi = UserGroupInformation.createProxyUser(user, UserGroupInformation.getLoginUser());

    return ugi.doAs(new PrivilegedAction<WebPageRedirectModel>() {

        @Override
        public WebPageRedirectModel run() {
            Key key = new Key.Builder(webPageRedirectModels(user)).add("url", url).build();
            return webPageRedirectModels(user).get(key);
        }
    });
}

From source file:org.springframework.data.hadoop.fs.DistCp.java

License:Apache License

/**
 * DistCopy using a command-line style (arguments are specified as {@link String}s).
 *
 * @param arguments copy arguments
 */
public void copy(String... arguments) {
    Assert.notEmpty(arguments, "invalid number of arguments");
    // sanitize the arguments
    final List<String> parsedArguments = new ArrayList<String>();
    for (String arg : arguments) {
        parsedArguments.addAll(Arrays.asList(StringUtils.tokenizeToStringArray(arg, " ")));
    }

    try {
        if (StringUtils.hasText(user)) {
            UserGroupInformation ugi = UserGroupInformation.createProxyUser(user,
                    UserGroupInformation.getLoginUser());
            ugi.doAs(new PrivilegedExceptionAction<Void>() {
                @Override
                public Void run() throws Exception {
                    invokeCopy(configuration, parsedArguments.toArray(new String[parsedArguments.size()]));
                    return null;
                }
            });
        } else {
            invokeCopy(configuration, parsedArguments.toArray(new String[parsedArguments.size()]));
        }
    } catch (Exception ex) {
        throw new IllegalStateException("Cannot run distCp impersonated as '" + user + "'", ex);
    }
}

From source file:org.springframework.data.hadoop.mapreduce.HadoopCodeExecutor.java

License:Apache License

protected int runCode() throws Exception {
    // merge configuration options
    final Configuration cfg = resolveConfiguration();

    // resolve target object
    final Class<T> type = resolveTargetClass(cfg);
    final T target = resolveTargetObject(type);

    // setup the invocation context
    Thread th = Thread.currentThread();
    ClassLoader oldTccl = th.getContextClassLoader();

    log.info("Invoking [" + (target != null ? target : type) + "] "
            + (jar != null ? "from jar [" + jar.getURI() + "]" : "") + " with args ["
            + Arrays.toString(arguments) + "]");

    ClassLoader newCL = cfg.getClassLoader();
    boolean isJarCL = newCL instanceof ParentLastURLClassLoader;
    try {
        ExecutionUtils.disableSystemExitCall();
        if (isJarCL) {
            ExecutionUtils.preventHadoopLeaks(beanClassLoader);
        }

        //ExecutionUtils.earlyLeaseDaemonInit(cfg);

        th.setContextClassLoader(newCL);

        if (StringUtils.hasText(user)) {
            UserGroupInformation ugi = UserGroupInformation.createProxyUser(user,
                    UserGroupInformation.getLoginUser());

            return ugi.doAs(new PrivilegedExceptionAction<Integer>() {
                @Override
                public Integer run() throws Exception {
                    return invokeTarget(cfg, target, type, arguments);
                }
            });
        } else {
            return invokeTarget(cfg, target, type, arguments);
        }
    } finally {
        ExecutionUtils.enableSystemExitCall();
        th.setContextClassLoader(oldTccl);

        if (isJarCL) {
            if (closeFs) {
                ExecutionUtils.shutdownFileSystem(cfg);
            }
            ExecutionUtils.patchLeakedClassLoader(newCL, oldTccl);
        }
    }
}

From source file:org.springframework.data.hadoop.mapreduce.JobFactoryBean.java

License:Apache License

@SuppressWarnings("rawtypes")
public void afterPropertiesSet() throws Exception {
    final Configuration cfg = ConfigurationUtils.createFrom(configuration, properties);

    buildGenericOptions(cfg);

    if (StringUtils.hasText(user)) {
        UserGroupInformation ugi = UserGroupInformation.createProxyUser(user,
                UserGroupInformation.getLoginUser());
        ugi.doAs(new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                job = new Job(cfg);
                return null;
            }
        });
    } else {
        job = new Job(cfg);
    }

    ClassLoader loader = (beanClassLoader != null ? beanClassLoader
            : org.springframework.util.ClassUtils.getDefaultClassLoader());

    if (jar != null) {
        JobConf conf = (JobConf) job.getConfiguration();
        conf.setJar(jar.getURI().toString());
        loader = ExecutionUtils.createParentLastClassLoader(jar, beanClassLoader, cfg);
        conf.setClassLoader(loader);
    }

    // set the mapper/reducer first to enable auto-detection of the key/value types so they need not be specified explicitly
    if (mapper != null) {
        Class<? extends Mapper> mapperClass = resolveClass(mapper, loader, Mapper.class);
        job.setMapperClass(mapperClass);
        configureMapperTypesIfPossible(job, mapperClass);
    }

    if (reducer != null) {
        Class<? extends Reducer> reducerClass = resolveClass(reducer, loader, Reducer.class);
        job.setReducerClass(reducerClass);
        configureReducerTypesIfPossible(job, reducerClass);
    }

    if (StringUtils.hasText(name)) {
        job.setJobName(name);
    }
    if (combiner != null) {
        job.setCombinerClass(resolveClass(combiner, loader, Reducer.class));
    }
    if (groupingComparator != null) {
        job.setGroupingComparatorClass(resolveClass(groupingComparator, loader, RawComparator.class));
    }
    if (inputFormat != null) {
        job.setInputFormatClass(resolveClass(inputFormat, loader, InputFormat.class));
    }
    if (mapKey != null) {
        job.setMapOutputKeyClass(resolveClass(mapKey, loader, Object.class));
    }
    if (mapValue != null) {
        job.setMapOutputValueClass(resolveClass(mapValue, loader, Object.class));
    }
    if (numReduceTasks != null) {
        job.setNumReduceTasks(numReduceTasks);
    }
    if (key != null) {
        job.setOutputKeyClass(resolveClass(key, loader, Object.class));
    }
    if (value != null) {
        job.setOutputValueClass(resolveClass(value, loader, Object.class));
    }
    if (outputFormat != null) {
        job.setOutputFormatClass(resolveClass(outputFormat, loader, OutputFormat.class));
    }
    if (partitioner != null) {
        job.setPartitionerClass(resolveClass(partitioner, loader, Partitioner.class));
    }
    if (sortComparator != null) {
        job.setSortComparatorClass(resolveClass(sortComparator, loader, RawComparator.class));
    }
    if (StringUtils.hasText(workingDir)) {
        job.setWorkingDirectory(new Path(workingDir));
    }
    if (jarClass != null) {
        job.setJarByClass(jarClass);
    }

    if (!CollectionUtils.isEmpty(inputPaths)) {
        for (String path : inputPaths) {
            FileInputFormat.addInputPath(job, new Path(path));
        }
    }

    if (StringUtils.hasText(outputPath)) {
        FileOutputFormat.setOutputPath(job, new Path(outputPath));
    }

    if (compressOutput != null) {
        FileOutputFormat.setCompressOutput(job, compressOutput);
    }

    if (codecClass != null) {
        FileOutputFormat.setOutputCompressorClass(job,
                resolveClass(codecClass, loader, CompressionCodec.class));
    }

    processJob(job);
}

From source file:org.springframework.data.hadoop.mapreduce.StreamJobFactoryBean.java

License:Apache License

public void afterPropertiesSet() throws Exception {
    Assert.isTrue(!ObjectUtils.isEmpty(input), "at least one input required");
    Assert.hasText(output, "the output is required");

    final Configuration cfg = ConfigurationUtils.createFrom(configuration, properties);

    buildGenericOptions(cfg);

    Map<String, String> args = new LinkedHashMap<String, String>();

    // add unique arguments
    addArgument(output, "-output", args);
    addArgument(mapper, "-mapper", args);
    addArgument(reducer, "-reducer", args);
    addArgument(combiner, "-combiner", args);
    addArgument(partitioner, "-partitioner", args);
    addArgument(inputFormat, "-inputformat", args);
    addArgument(outputFormat, "-outputformat", args);

    if (numReduceTasks != null)
        addArgument(numReduceTasks.toString(), "-numReduceTasks", args);

    // translate map to list
    final List<String> argsList = new ArrayList<String>(args.size() * 2 + 16);

    for (Map.Entry<String, String> entry : args.entrySet()) {
        argsList.add(entry.getKey());
        argsList.add(entry.getValue());
    }

    // add -cmdEnv (to the list not the map to avoid key collision)
    if (cmdEnv != null) {
        Enumeration<?> props = cmdEnv.propertyNames();
        while (props.hasMoreElements()) {
            String key = props.nextElement().toString();
            argsList.add("-cmdenv");
            argsList.add(key + "=" + cmdEnv.getProperty(key));
        }
    }

    // add recurring arguments
    addArgument(input, "-input", argsList);

    if (StringUtils.hasText(user)) {
        UserGroupInformation ugi = UserGroupInformation.createProxyUser(user,
                UserGroupInformation.getLoginUser());
        ugi.doAs(new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                job = new Job(createStreamJob(cfg, argsList.toArray(new String[argsList.size()])));
                return null;
            }
        });
    } else {
        job = new Job(createStreamJob(cfg, argsList.toArray(new String[argsList.size()])));
    }

    job.setJobName(name);
}

From source file:org.springframework.data.hadoop.pig.PigServerFactoryBean.java

License:Apache License

protected PigServer createPigInstance() throws Exception {
    final PigContext ctx = (pigContext != null ? pigContext : new PigContext());

    // apparently if not connected, Pig can cause all kinds of errors
    PigServer pigServer = null;

    try {
        if (StringUtils.hasText(user)) {
            UserGroupInformation ugi = UserGroupInformation.createProxyUser(user,
                    UserGroupInformation.getLoginUser());
            pigServer = ugi.doAs(new PrivilegedExceptionAction<PigServer>() {
                @Override
                public PigServer run() throws Exception {
                    return new PigServer(ctx, true);
                }
            });
        } else {
            pigServer = new PigServer(ctx, true);
        }
    } catch (ExecException ex) {
        throw PigUtils.convert(ex);
    }

    if (!CollectionUtils.isEmpty(pathToSkip)) {
        for (String path : pathToSkip) {
            pigServer.addPathToSkip(path);
        }
    }

    if (parallelism != null) {
        pigServer.setDefaultParallel(parallelism);
    }

    if (StringUtils.hasText(jobName)) {
        pigServer.setJobName(jobName);
    } else {
        if (StringUtils.hasText(beanName)) {
            pigServer.setJobName(beanName);
        }
    }

    if (StringUtils.hasText(jobPriority)) {
        pigServer.setJobPriority(jobPriority);
    }

    if (validateEachStatement != null) {
        PigUtils.validateEachStatement(pigServer, validateEachStatement);
    }

    if (!CollectionUtils.isEmpty(scripts)) {
        PigUtils.runWithConversion(pigServer, scripts, false);
    }

    return pigServer;
}

From source file:org.trustedanalytics.servicebroker.hive.config.KerberosDataSource.java

License:Apache License

private Connection getConnection(UserGroupInformation signedOnUserSubject)
        throws PrivilegedActionException, IOException, InterruptedException {
    return (Connection) signedOnUserSubject.doAs((PrivilegedExceptionAction<Object>) () -> {
        Class.forName(JDBC_DRIVER);
        return DriverManager.getConnection(jdbcUrl, null, null);
    });
}

From source file:oz.hadoop.yarn.test.cluster.InJvmContainerExecutor.java

License:Apache License

/**
 * Launches the given Java container in the current JVM, running the launch
 * as the UGI built for that container.
 */
private int launchJavaContainer(final Container container, final Path containerWorkDir) {
    UserGroupInformation ugi = this.buildUgiForContainerLaunching(container, containerWorkDir);
    return ugi.doAs(new PrivilegedAction<Integer>() {
        @Override
        public Integer run() {
            return InJvmContainerExecutor.this.doLaunch(container, containerWorkDir);
        }
    });
}

From source file:ruciotools.WebRucioGrep.java

License:Apache License

/**
 * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
 */
protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    final PrintWriter out = response.getWriter();

    Enumeration<String> parameterNames = request.getParameterNames();
    List<String> params = new ArrayList<String>();
    while (parameterNames.hasMoreElements()) {
        String paramName = parameterNames.nextElement();
        for (String v : request.getParameterValues(paramName)) {
            params.add("-" + paramName);
            params.add(v);
        }

    }
    final String[] args = new String[params.size()];
    params.toArray(args);

    FileSystem fs = DistributedFileSystem.get(new Configuration());
    FSDataOutputStream of1 = fs.create(new Path("/user/rucio01/log/test-MR-before.ralph"));
    of1.write(new String("ralph").getBytes());
    of1.close();

    System.out.println("--------------status---:" + UserGroupInformation.isLoginKeytabBased());
    System.out.println("--------------current user---:" + UserGroupInformation.getCurrentUser());
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    boolean isKeyTab = false; //ugi.isFromKeytab();
    if (isKeyTab) {
        ugi.checkTGTAndReloginFromKeytab();
    } else {
        UserGroupInformation.loginUserFromKeytab("rucio01", "/etc/hadoop/conf/rucio01.keytab");
        isKeyTab = UserGroupInformation.isLoginKeytabBased();
        if (isKeyTab) {
            ugi = UserGroupInformation.getCurrentUser();
        }
    }
    System.out.println("---------AFTER LOGIN-----:");
    System.out.println("--------------status---:" + UserGroupInformation.isLoginKeytabBased());
    System.out.println("--------------current user---:" + UserGroupInformation.getCurrentUser());

    //FileSystem fs = DistributedFileSystem.get(new Configuration());
    FSDataOutputStream of = fs.create(new Path("/user/rucio01/log/test-MR-outer.ralph"));
    of.write(new String("ralph").getBytes());
    of.close();

    try {
        ugi.doAs(new PrivilegedExceptionAction<Void>() {
            public Void run() throws Exception {

                FileSystem fs = DistributedFileSystem.get(new Configuration());
                FSDataOutputStream of = fs.create(new Path("/user/rucio01/log/test-MR-inner.ralph"));
                of.write(new String("ralph").getBytes());
                of.close();

                // Verify input parameters
                Map<String, Object> settings = Grep.parseCommandLineArguments(args);
                if ((Boolean) settings.get("printUsage")) {
                    out.println((String) settings.get("errorMessage"));
                    out.println(Grep.printUsage());
                    return null;
                }

                // Derive tmp dir for job output
                settings.put("tempDir",
                        new Path("rucio-grep-" + Integer.toString(new Random().nextInt(Integer.MAX_VALUE))));

                // Execute MR job
                try {
                    if (!Grep.runJob(settings)) {
                        out.println("Something went wrong :-(\n");
                        out.println(
                                "Hints: (1) do not redirect stderr to /dev/null (2)  consider setting -excludeTmpFiles in case of IOExceptions\n");
                    }
                } catch (Exception e) {
                    out.println(e);
                    return null;
                }
                try {
                    out.println(Grep.getResults(settings));
                } catch (Exception e) {
                    out.println("No job output found in " + settings.get("tempDir").toString());
                    out.println(e);
                }
                return null;
            }
        });
    } catch (Exception e) {
        System.out.println(e);
    }
}