Example usage for org.apache.hadoop.security UserGroupInformation createProxyUser

Introduction

On this page you can find example usage of org.apache.hadoop.security.UserGroupInformation.createProxyUser.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public static UserGroupInformation createProxyUser(String user, UserGroupInformation realUser) 

Document

Create a proxy user using the username of the effective user and the UGI of the real user.
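
A minimal, illustrative sketch of the typical pattern follows. The user name "alice" and the path are assumptions, and impersonation additionally requires that the real (login) user is authorized via the hadoop.proxyuser.* properties in core-site.xml.

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class CreateProxyUserExample {
    public static void main(String[] args) throws Exception {
        final Configuration conf = new Configuration();

        // Effective user "alice", backed by the credentials of the real (login) user.
        UserGroupInformation ugi = UserGroupInformation.createProxyUser("alice",
                UserGroupInformation.getLoginUser());

        // Everything inside run() executes with "alice" as the effective user.
        boolean exists = ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
            @Override
            public Boolean run() throws Exception {
                FileSystem fs = FileSystem.get(conf);
                return fs.exists(new Path("/user/alice"));
            }
        });
        System.out.println("/user/alice exists: " + exists);
    }
}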

Usage

From source file:org.kitesdk.spring.hbase.example.service.WebPageSnapshotService.java

License:Apache License

/**
 * Get WebPageSnapshotModels for a URL from HBase since the since param.
 *
 * @param url The URL of the page to fetch
 * @param since The point in time since which models should be fetched
 * @param user The user on whose behalf the models are fetched
 * @return The list of models that have been fetched for the URL since the
 *         since param.
 */
private List<WebPageSnapshotModel> getWebPageSnapshotsSince(String url, final long since, final String user)
        throws IOException {
    List<WebPageSnapshotModel> snapshots = null;
    final String normalizedUrl = normalizeUrl(url, user);

    UserGroupInformation ugi = UserGroupInformation.createProxyUser(user, UserGroupInformation.getLoginUser());

    snapshots = ugi.doAs(new PrivilegedAction<List<WebPageSnapshotModel>>() {

        @Override
        public List<WebPageSnapshotModel> run() {
            List<WebPageSnapshotModel> models = new ArrayList<WebPageSnapshotModel>();
            DatasetReader<WebPageSnapshotModel> reader = null;
            try {
                reader = webPageSnapshotModels(user).from("url", normalizedUrl).from("fetchedAtRevTs", 0L)
                        .to("url", normalizedUrl).to("fetchedAtRevTs", since).newReader();
                while (reader.hasNext()) {
                    models.add(reader.next());
                }
            } finally {
                if (reader != null) {
                    reader.close();
                }
            }
            return models;
        }
    });

    return snapshots;
}

From source file:org.kitesdk.spring.hbase.example.service.WebPageSnapshotService.java

License:Apache License

/**
 * Return a WebPageRedirectModel if a URL is one that redirects to a
 * different source. Otherwise, returns null.
 *
 * @return The WebPageRedirectModel, or null if the URL does not redirect
 */
private WebPageRedirectModel getRedirect(final String url, final String user) throws IOException {
    UserGroupInformation ugi = UserGroupInformation.createProxyUser(user, UserGroupInformation.getLoginUser());

    return ugi.doAs(new PrivilegedAction<WebPageRedirectModel>() {

        @Override
        public WebPageRedirectModel run() {
            Key key = new Key.Builder(webPageRedirectModels(user)).add("url", url).build();
            return webPageRedirectModels(user).get(key);
        }
    });
}

From source file:org.openflamingo.remote.thrift.thriftfs.ThriftHandlerBase.java

License:Apache License

/**
 * The methods below should be called by all RPCs with the request context
 * passed in, whenever said RPCs are accessing Hadoop-internal methods. These
 * assume the authentication role of the requester.
 * <p/>
 * Most of the time you can just wrap the entire contents of the method with
 * these methods. If, however, your RPC needs to throw an exception not of
 * type IOException, then you may need to wrap only the portions which
 * actually touch Hadoop, and then throw your own exception(s) based on the
 * result of these calls.
 */
protected <T> T assumeUserContextAndExecute(RequestContext ctx, PrivilegedExceptionAction<T> action)
        throws IOException {
    try {
        return UserGroupInformation
                .createProxyUser(ctx.confOptions.get("effective_user"), UserGroupInformation.getCurrentUser())
                .doAs(action);
    } catch (Throwable e) {
        throw ThriftUtils.toThrift(e);
    }
}
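
The sketch below shows how a handler method might use assumeUserContextAndExecute to run a filesystem call as the effective user from the request context. The method name, the directory-listing body and the conf field are assumptions; only assumeUserContextAndExecute itself comes from the listing above.

protected Configuration conf; // assumed handler field holding the Hadoop configuration

public List<String> listDirectory(RequestContext ctx, final String dir) throws IOException {
    return assumeUserContextAndExecute(ctx, new PrivilegedExceptionAction<List<String>>() {
        @Override
        public List<String> run() throws Exception {
            // Runs with the effective user taken from ctx.confOptions.get("effective_user").
            FileSystem fs = FileSystem.get(conf);
            List<String> names = new ArrayList<String>();
            for (FileStatus status : fs.listStatus(new Path(dir))) {
                names.add(status.getPath().getName());
            }
            return names;
        }
    });
}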

From source file:org.openflamingo.remote.thrift.thriftfs.ThriftHandlerBase.java

License:Apache License

protected <T> T assumeUserContextAndExecute(RequestContext ctx, PrivilegedAction<T> action) {
    try {
        return UserGroupInformation
                .createProxyUser(ctx.confOptions.get("effective_user"), UserGroupInformation.getCurrentUser())
                .doAs(action);
    } catch (java.io.IOException e) {
        // This should only be thrown in the event getCurrentUser() fails.
        throw new Error(e);
    }
}

From source file:org.springframework.data.hadoop.fs.DistCp.java

License:Apache License

/**
 * DistCopy using a command-line style (arguments are specified as {@link String}s).
 *
 * @param arguments copy arguments
 */
public void copy(String... arguments) {
    Assert.notEmpty(arguments, "invalid number of arguments");
    // sanitize the arguments
    final List<String> parsedArguments = new ArrayList<String>();
    for (String arg : arguments) {
        parsedArguments.addAll(Arrays.asList(StringUtils.tokenizeToStringArray(arg, " ")));
    }

    try {
        if (StringUtils.hasText(user)) {
            UserGroupInformation ugi = UserGroupInformation.createProxyUser(user,
                    UserGroupInformation.getLoginUser());
            ugi.doAs(new PrivilegedExceptionAction<Void>() {
                @Override
                public Void run() throws Exception {
                    invokeCopy(configuration, parsedArguments.toArray(new String[parsedArguments.size()]));
                    return null;
                }
            });
        } else {
            invokeCopy(configuration, parsedArguments.toArray(new String[parsedArguments.size()]));
        }
    } catch (Exception ex) {
        throw new IllegalStateException("Cannot run distCp impersonated as '" + user + "'", ex);
    }
}
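
A hedged usage sketch: the paths, the -overwrite option and the way the distCp instance is obtained are assumptions; only the copy(String...) signature and the whitespace tokenization come from the method above.

// "distCp" is assumed to be a configured org.springframework.data.hadoop.fs.DistCp
// instance (for example injected from the application context) with its "user"
// property set to the account to impersonate.
distCp.copy("hdfs://namenode:8020/source/logs", "hdfs://namenode:8020/target/logs");

// Each argument is tokenized on whitespace, so a single command-line style string,
// including options such as -overwrite, also works:
distCp.copy("-overwrite hdfs://namenode:8020/source/logs hdfs://namenode:8020/target/logs");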

From source file:org.springframework.data.hadoop.mapreduce.HadoopCodeExecutor.java

License:Apache License

protected int runCode() throws Exception {
    // merge configuration options
    final Configuration cfg = resolveConfiguration();

    // resolve target object
    final Class<T> type = resolveTargetClass(cfg);
    final T target = resolveTargetObject(type);

    // setup the invocation context
    Thread th = Thread.currentThread();
    ClassLoader oldTccl = th.getContextClassLoader();

    log.info("Invoking [" + (target != null ? target : type) + "] "
            + (jar != null ? "from jar [" + jar.getURI() + "]" : "") + " with args ["
            + Arrays.toString(arguments) + "]");

    ClassLoader newCL = cfg.getClassLoader();
    boolean isJarCL = newCL instanceof ParentLastURLClassLoader;
    try {
        ExecutionUtils.disableSystemExitCall();
        if (isJarCL) {
            ExecutionUtils.preventHadoopLeaks(beanClassLoader);
        }

        //ExecutionUtils.earlyLeaseDaemonInit(cfg);

        th.setContextClassLoader(newCL);

        if (StringUtils.hasText(user)) {
            UserGroupInformation ugi = UserGroupInformation.createProxyUser(user,
                    UserGroupInformation.getLoginUser());

            return ugi.doAs(new PrivilegedExceptionAction<Integer>() {
                @Override
                public Integer run() throws Exception {
                    return invokeTarget(cfg, target, type, arguments);
                }
            });
        } else {
            return invokeTarget(cfg, target, type, arguments);
        }
    } finally {
        ExecutionUtils.enableSystemExitCall();
        th.setContextClassLoader(oldTccl);

        if (isJarCL) {
            if (closeFs) {
                ExecutionUtils.shutdownFileSystem(cfg);
            }
            ExecutionUtils.patchLeakedClassLoader(newCL, oldTccl);
        }
    }
}

From source file:org.springframework.data.hadoop.mapreduce.JobFactoryBean.java

License:Apache License

@SuppressWarnings("rawtypes")
public void afterPropertiesSet() throws Exception {
    final Configuration cfg = ConfigurationUtils.createFrom(configuration, properties);

    buildGenericOptions(cfg);

    if (StringUtils.hasText(user)) {
        UserGroupInformation ugi = UserGroupInformation.createProxyUser(user,
                UserGroupInformation.getLoginUser());
        ugi.doAs(new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                job = new Job(cfg);
                return null;
            }
        });
    } else {
        job = new Job(cfg);
    }

    ClassLoader loader = (beanClassLoader != null ? beanClassLoader
            : org.springframework.util.ClassUtils.getDefaultClassLoader());

    if (jar != null) {
        JobConf conf = (JobConf) job.getConfiguration();
        conf.setJar(jar.getURI().toString());
        loader = ExecutionUtils.createParentLastClassLoader(jar, beanClassLoader, cfg);
        conf.setClassLoader(loader);
    }

    // set first to enable auto-detection of K/V to skip the key/value types to be specified
    if (mapper != null) {
        Class<? extends Mapper> mapperClass = resolveClass(mapper, loader, Mapper.class);
        job.setMapperClass(mapperClass);
        configureMapperTypesIfPossible(job, mapperClass);
    }

    if (reducer != null) {
        Class<? extends Reducer> reducerClass = resolveClass(reducer, loader, Reducer.class);
        job.setReducerClass(reducerClass);
        configureReducerTypesIfPossible(job, reducerClass);
    }

    if (StringUtils.hasText(name)) {
        job.setJobName(name);
    }
    if (combiner != null) {
        job.setCombinerClass(resolveClass(combiner, loader, Reducer.class));
    }
    if (groupingComparator != null) {
        job.setGroupingComparatorClass(resolveClass(groupingComparator, loader, RawComparator.class));
    }
    if (inputFormat != null) {
        job.setInputFormatClass(resolveClass(inputFormat, loader, InputFormat.class));
    }
    if (mapKey != null) {
        job.setMapOutputKeyClass(resolveClass(mapKey, loader, Object.class));
    }
    if (mapValue != null) {
        job.setMapOutputValueClass(resolveClass(mapValue, loader, Object.class));
    }
    if (numReduceTasks != null) {
        job.setNumReduceTasks(numReduceTasks);
    }
    if (key != null) {
        job.setOutputKeyClass(resolveClass(key, loader, Object.class));
    }
    if (value != null) {
        job.setOutputValueClass(resolveClass(value, loader, Object.class));
    }
    if (outputFormat != null) {
        job.setOutputFormatClass(resolveClass(outputFormat, loader, OutputFormat.class));
    }
    if (partitioner != null) {
        job.setPartitionerClass(resolveClass(partitioner, loader, Partitioner.class));
    }
    if (sortComparator != null) {
        job.setSortComparatorClass(resolveClass(sortComparator, loader, RawComparator.class));
    }
    if (StringUtils.hasText(workingDir)) {
        job.setWorkingDirectory(new Path(workingDir));
    }
    if (jarClass != null) {
        job.setJarByClass(jarClass);
    }

    if (!CollectionUtils.isEmpty(inputPaths)) {
        for (String path : inputPaths) {
            FileInputFormat.addInputPath(job, new Path(path));
        }
    }

    if (StringUtils.hasText(outputPath)) {
        FileOutputFormat.setOutputPath(job, new Path(outputPath));
    }

    if (compressOutput != null) {
        FileOutputFormat.setCompressOutput(job, compressOutput);
    }

    if (codecClass != null) {
        FileOutputFormat.setOutputCompressorClass(job,
                resolveClass(codecClass, loader, CompressionCodec.class));
    }

    processJob(job);
}

From source file:org.springframework.data.hadoop.mapreduce.StreamJobFactoryBean.java

License:Apache License

public void afterPropertiesSet() throws Exception {
    Assert.isTrue(!ObjectUtils.isEmpty(input), "at least one input required");
    Assert.hasText(output, "the output is required");

    final Configuration cfg = ConfigurationUtils.createFrom(configuration, properties);

    buildGenericOptions(cfg);

    Map<String, String> args = new LinkedHashMap<String, String>();

    // add unique arguments
    addArgument(output, "-output", args);
    addArgument(mapper, "-mapper", args);
    addArgument(reducer, "-reducer", args);
    addArgument(combiner, "-combiner", args);
    addArgument(partitioner, "-partitioner", args);
    addArgument(inputFormat, "-inputformat", args);
    addArgument(outputFormat, "-outputformat", args);

    if (numReduceTasks != null)
        addArgument(numReduceTasks.toString(), "-numReduceTasks", args);

    // translate map to list
    final List<String> argsList = new ArrayList<String>(args.size() * 2 + 16);

    for (Map.Entry<String, String> entry : args.entrySet()) {
        argsList.add(entry.getKey());
        argsList.add(entry.getValue());
    }

    // add -cmdEnv (to the list not the map to avoid key collision)
    if (cmdEnv != null) {
        Enumeration<?> props = cmdEnv.propertyNames();
        while (props.hasMoreElements()) {
            String key = props.nextElement().toString();
            argsList.add("-cmdenv");
            argsList.add(key + "=" + cmdEnv.getProperty(key));
        }
    }

    // add recurring arguments
    addArgument(input, "-input", argsList);

    if (StringUtils.hasText(user)) {
        UserGroupInformation ugi = UserGroupInformation.createProxyUser(user,
                UserGroupInformation.getLoginUser());
        ugi.doAs(new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                job = new Job(createStreamJob(cfg, argsList.toArray(new String[argsList.size()])));
                return null;
            }
        });
    } else {
        job = new Job(createStreamJob(cfg, argsList.toArray(new String[argsList.size()])));
    }

    job.setJobName(name);
}

From source file:org.springframework.data.hadoop.pig.PigServerFactoryBean.java

License:Apache License

protected PigServer createPigInstance() throws Exception {
    final PigContext ctx = (pigContext != null ? pigContext : new PigContext());

    // apparently if not connected, pig can cause all kinds of errors
    PigServer pigServer = null;

    try {
        if (StringUtils.hasText(user)) {
            UserGroupInformation ugi = UserGroupInformation.createProxyUser(user,
                    UserGroupInformation.getLoginUser());
            pigServer = ugi.doAs(new PrivilegedExceptionAction<PigServer>() {
                @Override
                public PigServer run() throws Exception {
                    return new PigServer(ctx, true);
                }
            });
        } else {
            pigServer = new PigServer(ctx, true);
        }
    } catch (ExecException ex) {
        throw PigUtils.convert(ex);
    }

    if (!CollectionUtils.isEmpty(pathToSkip)) {
        for (String path : pathToSkip) {
            pigServer.addPathToSkip(path);
        }
    }

    if (parallelism != null) {
        pigServer.setDefaultParallel(parallelism);
    }

    if (StringUtils.hasText(jobName)) {
        pigServer.setJobName(jobName);
    } else {
        if (StringUtils.hasText(beanName)) {
            pigServer.setJobName(beanName);
        }
    }

    if (StringUtils.hasText(jobPriority)) {
        pigServer.setJobPriority(jobPriority);
    }

    if (validateEachStatement != null) {
        PigUtils.validateEachStatement(pigServer, validateEachStatement);
    }

    if (!CollectionUtils.isEmpty(scripts)) {
        PigUtils.runWithConversion(pigServer, scripts, false);
    }

    return pigServer;
}

From source file:uk.ac.gla.terrier.probos.controller.ControllerServer.java

License:Open Source License

protected boolean storeJobScript(final JobInformation ji, final String requestorUserName, final byte[] source)
        throws IOException {
    final String jobFolderName = String.valueOf(Math.abs(random.nextInt()));

    final Path jobFolder = new Path(probosFolder, jobFolderName);
    final Path script = new Path(probosFolder, jobFolderName + ".SC");
    PrivilegedExceptionAction<Path> submitAction = new PrivilegedExceptionAction<Path>() {
        public Path run() throws Exception {
            FileSystem fs = FileSystem.get(yConf);
            fs.mkdirs(jobFolder);
            OutputStream os = fs.create(script);
            os.write(source);
            os.close();
            LOG.info("Wrote " + source.length + " bytes to " + script.toString() + " as the job script for job "
                    + ji.jobId);
            return script;
        }
    };

    //setuid to the requestor's user id
    UserGroupInformation proxyUser = UserGroupInformation.createProxyUser(requestorUserName,
            UserGroupInformation.getLoginUser());
    Path rtr = null;
    try {
        if (UserGroupInformation.isSecurityEnabled())
            rtr = proxyUser.doAs(submitAction);
        else
            rtr = submitAction.run();
        ji.proxyUser = proxyUser;
        ji.scriptLocation = rtr;
        ji.folderLocation = jobFolder;
        ji.modify();
        return true;
    } catch (Exception e) {
        LOG.error("Could not store job file!", e);
        return false;
    }
}