Example usage for java.security PrivilegedExceptionAction PrivilegedExceptionAction

List of usage examples for java.security PrivilegedExceptionAction PrivilegedExceptionAction

Introduction

On this page you can find example usages of java.security PrivilegedExceptionAction PrivilegedExceptionAction.

Prototype

PrivilegedExceptionAction

Source Link

Usage

From source file:com.trendmicro.hdfs.webdav.test.TestMkcolSimple.java

@BeforeClass
public static void setup() throws Exception {
    // Let the current (gateway) user proxy for members of "users" connecting
    // from localhost, and allow anonymous access via simple authentication.
    Configuration clusterConf = minicluster.getConfiguration();
    String proxyUser = UserGroupInformation.getCurrentUser().getShortUserName();
    clusterConf.set("hadoop.proxyuser." + proxyUser + ".groups", "users");
    clusterConf.set("hadoop.proxyuser." + proxyUser + ".hosts", "localhost");
    clusterConf.set("hadoop.webdav.authentication.type", "simple");
    clusterConf.setBoolean("hadoop.webdav.authentication.simple.anonymous.allowed", true);

    minicluster.startMiniCluster(gatewayUser);
    LOG.info("Gateway started on port " + minicluster.getGatewayPort());

    // Clear the umask so the permissions below are applied exactly as given.
    FsPermission.setUMask(clusterConf, new FsPermission((short) 0));

    FileSystem testFs = minicluster.getTestFileSystem();
    Path testRoot = new Path("/test");
    assertTrue(testFs.mkdirs(testRoot, new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)));
    testFs.setOwner(testRoot, ownerUser.getShortUserName(), ownerUser.getGroupNames()[0]);

    // As the owning user, create a group-readable private directory and a
    // world-accessible public one under /test.
    ownerUser.doAs(new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
            FileSystem ownerFs = minicluster.getTestFileSystem();
            assertTrue(ownerFs.mkdirs(new Path("/test/private"),
                    new FsPermission(FsAction.ALL, FsAction.READ_EXECUTE, FsAction.NONE)));
            assertTrue(ownerFs.mkdirs(new Path("/test/public"),
                    new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)));
            return null;
        }
    });
}

From source file:org.apache.hadoop.hdfs.server.datanode.DatanodeJspHelper.java

/**
 * Creates a {@code DFSClient} for {@code addr} inside the given user's
 * security context, so RPCs carry that user's credentials.
 */
private static DFSClient getDFSClient(final UserGroupInformation user, final String addr,
        final Configuration conf) throws IOException, InterruptedException {
    final PrivilegedExceptionAction<DFSClient> createClient = new PrivilegedExceptionAction<DFSClient>() {
        @Override
        public DFSClient run() throws IOException {
            return new DFSClient(NetUtils.createSocketAddr(addr), conf);
        }
    };
    return user.doAs(createClient);
}

From source file:org.apache.axis2.jaxws.lifecycle.BaseLifecycleManager.java

/**
 * Invokes the given {@code @PostConstruct} lifecycle method.
 *
 * As per JSR-250, pre-destroy and post-construct methods may have public,
 * protected, private or default encapsulation, so the method is made
 * accessible (inside a privileged block, for when a security manager is
 * installed) before it is invoked.
 *
 * @param method the {@code @PostConstruct}-annotated method to invoke
 * @throws LifecycleException if the accessibility override fails
 */
protected void invokePostConstruct(final Method method) throws LifecycleException {
    if (log.isDebugEnabled()) {
        log.debug("Invoking Method with @PostConstruct annotation");
    }

    try {
        // Typed action (was a raw PrivilegedExceptionAction); only the
        // accessibility flip happens here — the invoke itself is below.
        AccessController.doPrivileged(new PrivilegedExceptionAction<Void>() {
            public Void run() throws InvocationTargetException, IllegalAccessException {
                if (!method.isAccessible()) {
                    method.setAccessible(true);
                }
                return null;
            }
        });
    } catch (PrivilegedActionException e) {
        // Unwrap and surface the underlying cause to the caller.
        throw new LifecycleException(e.getException());
    }
    invokeMethod(method, null);
    if (log.isDebugEnabled()) {
        log.debug("Completed invoke on Method with @PostConstruct annotation");
    }
}

From source file:org.apache.hadoop.hbase.security.TestUser.java

@Test
public void testRunAs() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    final User user = User.createUserForTesting(conf, "testuser", new String[] { "foo" });
    // Reusable action reporting which user is visible inside runAs().
    final PrivilegedExceptionAction<String> whoAmI = new PrivilegedExceptionAction<String>() {
        public String run() throws IOException {
            User current = User.getCurrent();
            return current.getName();
        }
    };

    String username = user.runAs(whoAmI);
    assertEquals("Current user within runAs() should match", "testuser", username);

    // ensure the next run is correctly set
    User user2 = User.createUserForTesting(conf, "testuser2", new String[] { "foo" });
    String username2 = user2.runAs(whoAmI);
    assertEquals("Second username should match second user", "testuser2", username2);

    // check the exception version
    username = user.runAs(new PrivilegedExceptionAction<String>() {
        public String run() throws Exception {
            return User.getCurrent().getName();
        }
    });
    assertEquals("User name in runAs() should match", "testuser", username);

    // verify that nested contexts work
    user2.runAs(new PrivilegedExceptionAction<Object>() {
        public Object run() throws IOException, InterruptedException {
            String nestedName = user.runAs(whoAmI);
            assertEquals("Nest name should match nested user", "testuser", nestedName);
            assertEquals("Current name should match current user", "testuser2", User.getCurrent().getName());
            return null;
        }
    });

    // non-throwing PrivilegedAction variant: fall back to "empty" on IOException
    username = user.runAs(new PrivilegedAction<String>() {
        String fallback = null;

        @Override
        public String run() {
            try {
                return User.getCurrent().getName();
            } catch (IOException e) {
                fallback = "empty";
            }
            return fallback;
        }
    });

    assertEquals("Current user within runAs() should match", "testuser", username);
}

From source file:com.scoredev.scores.HighScore.java

/**
 * Persists {@code score} as the high score for this game.
 *
 * Unprivileged callers must hold a {@code HighScorePermission} for the game;
 * once vetted, the file manipulation runs in a privileged block so callers
 * without file permissions can still save scores.
 *
 * @param score the new high score to record
 * @throws IOException if reading or writing the high-score file fails
 */
public void setHighScore(final int score) throws IOException {
    // check permission first
    SecurityManager sm = System.getSecurityManager();
    if (sm != null) {
        sm.checkPermission(new HighScorePermission(gameName));
    }

    // need a doPrivileged block to manipulate the file
    try {
        AccessController.doPrivileged(new PrivilegedExceptionAction<Void>() {
            public Void run() throws IOException {
                Hashtable scores = null;
                // Best-effort read of the existing table; any failure (missing
                // or corrupt file) falls through to creating a fresh table.
                // NOTE(review): no locking protocol — concurrent writers can
                // lose updates (could use File.createNewFile).
                try (FileInputStream fis = new FileInputStream(highScoreFile);
                        ObjectInputStream ois = new ObjectInputStream(fis)) {
                    scores = (Hashtable) ois.readObject();
                } catch (Exception ignored) {
                    // ignore, try and create new file
                }

                // if scores is null, create a new hashtable
                if (scores == null) {
                    scores = new Hashtable(13);
                }

                // Update this game's entry and rewrite the whole table;
                // try-with-resources guarantees the streams are closed even
                // if writeObject throws (the original leaked on that path).
                scores.put(gameName, Integer.valueOf(score));
                try (FileOutputStream fos = new FileOutputStream(highScoreFile);
                        ObjectOutputStream oos = new ObjectOutputStream(fos)) {
                    oos.writeObject(scores);
                }
                return null;
            }
        });
    } catch (PrivilegedActionException pae) {
        // The only checked exception run() declares is IOException.
        throw (IOException) pae.getException();
    }
}

From source file:org.apache.hadoop.mapred.gridmix.LoadJob.java

/**
 * Configures the load job's map/reduce pipeline and submits it inside the
 * target user's security context.
 *
 * @return the submitted {@link Job}; {@code submit()} is asynchronous, so
 *         the job is not complete when this returns
 * @throws IOException            if submission or output-path setup fails
 * @throws InterruptedException   if the doAs call is interrupted
 * @throws ClassNotFoundException if a job class cannot be resolved at submit time
 */
public Job call() throws IOException, InterruptedException, ClassNotFoundException {
    // Submit as ugi so the job is owned by the simulated user.
    ugi.doAs(new PrivilegedExceptionAction<Job>() {
        public Job run() throws IOException, ClassNotFoundException, InterruptedException {
            // Wire the Gridmix load pipeline: mapper/reducer, intermediate
            // key/value types, and the comparators that control sort/grouping.
            job.setMapperClass(LoadMapper.class);
            job.setReducerClass(LoadReducer.class);
            job.setNumReduceTasks(jobdesc.getNumberReduces());
            job.setMapOutputKeyClass(GridmixKey.class);
            job.setMapOutputValueClass(GridmixRecord.class);
            job.setSortComparatorClass(GridmixKey.Comparator.class);
            job.setGroupingComparatorClass(SpecGroupingComparator.class);
            job.setInputFormatClass(LoadInputFormat.class);
            job.setOutputFormatClass(RawBytesOutputFormat.class);
            job.setPartitionerClass(DraftPartitioner.class);
            job.setJarByClass(LoadJob.class);
            // presumably marks GenericOptionsParser as used to silence the
            // framework warning — TODO confirm
            job.getConfiguration().setBoolean("mapred.used.genericoptionsparser", true);
            FileOutputFormat.setOutputPath(job, outdir);
            job.submit();
            return job;
        }
    });

    return job;
}

From source file:org.apache.nifi.hadoop.KerberosKeytabSPNegoScheme.java

/**
 * Performs a keytab-based Kerberos login and runs the SPNEGO/GSS handshake
 * as the logged-in subject, producing the next token for the HTTP service
 * on {@code authServer}.
 */
@Override
public byte[] generateToken(byte[] input, String authServer, Credentials credentials) {
    // Subject seeded with just the Kerberos principal from the credentials.
    Set<Principal> principalSet = new HashSet<>();
    principalSet.add(credentials.getUserPrincipal());
    Subject initialSubject = new Subject(false, principalSet, new HashSet<>(), new HashSet<>());

    try {
        KerberosConfiguration kerberosConfig = new KerberosConfiguration(
                credentials.getUserPrincipal().getName(),
                ((KerberosKeytabCredentials) credentials).getKeytab());
        LoginContext loginContext = new LoginContext("", initialSubject, null, kerberosConfig);
        loginContext.login();

        // Run the GSS exchange under the authenticated subject.
        return Subject.doAs(loginContext.getSubject(), new PrivilegedExceptionAction<byte[]>() {

            public byte[] run() throws UnknownHostException, ClassNotFoundException, GSSException,
                    IllegalAccessException, NoSuchFieldException {
                GSSManager manager = GSSManager.getInstance();
                GSSName target = manager.createName(
                        KerberosUtil.getServicePrincipal("HTTP", authServer),
                        KerberosUtil.getOidInstance("NT_GSS_KRB5_PRINCIPAL"));
                GSSContext gssContext = manager.createContext(target,
                        KerberosUtil.getOidInstance("GSS_KRB5_MECH_OID"), null, 0);
                gssContext.requestCredDeleg(true);
                gssContext.requestMutualAuth(true);
                return gssContext.initSecContext(input, 0, input.length);
            }

        });
    } catch (PrivilegedActionException | LoginException e) {
        throw new RuntimeException(e);
    }
}

From source file:org.apache.hadoop.hdfs.TestHftpDelegationToken.java

/**
 * Verifies that an HftpFileSystem created in a user's security context
 * selects that user's HDFS delegation token as its renew token, ignoring
 * a second token of a different kind for the same service.
 */
@Test
public void testHdfsDelegationToken() throws Exception {
    final Configuration conf = new Configuration();
    // Kerberos must be enabled for delegation-token handling to engage.
    conf.set(HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation user = UserGroupInformation.createUserForTesting("oom", new String[] { "memory" });
    // The real HDFS delegation token for the namenode service address.
    Token<?> token = new Token<TokenIdentifier>(new byte[0], new byte[0],
            DelegationTokenIdentifier.HDFS_DELEGATION_KIND, new Text("127.0.0.1:8020"));
    user.addToken(token);
    // A decoy token of a different kind for the same service.
    Token<?> token2 = new Token<TokenIdentifier>(null, null, new Text("other token"),
            new Text("127.0.0.1:8020"));
    user.addToken(token2);
    assertEquals("wrong tokens in user", 2, user.getTokens().size());
    // Create the filesystem inside the user's context so its token
    // selection sees the two tokens added above.
    FileSystem fs = user.doAs(new PrivilegedExceptionAction<FileSystem>() {
        public FileSystem run() throws Exception {
            return FileSystem.get(new URI("hftp://localhost:50470/"), conf);
        }
    });
    assertSame("wrong kind of file system", HftpFileSystem.class, fs.getClass());
    // Reach into the private field to confirm the renew token is exactly
    // the HDFS delegation token, not the decoy.
    Field renewToken = HftpFileSystem.class.getDeclaredField("renewToken");
    renewToken.setAccessible(true);
    assertSame("wrong token", token, renewToken.get(fs));
}

From source file:com.marmalade.studio.android.gcm.s3eGCMClientBroadcastReceiver.java

/**
 * Invokes the optional {@code s3eGCMClient.s3eGCMClientNotificationReceived()}
 * callback via reflection, if the extension class is on the classpath.
 *
 * Best-effort by design: any failure (extension absent, method missing,
 * invocation error) is deliberately swallowed so notification handling
 * never crashes the receiver.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
private void doNotificationCallback() {

    try {

        // Resolve the extension class and the no-arg callback reflectively,
        // so this receiver has no compile-time dependency on the extension.
        final Class extension_class = Class.forName("s3eGCMClient");
        final Method notification_method = extension_class.getMethod("s3eGCMClientNotificationReceived",
                new Class[] {});

        // Typed action (was a raw PrivilegedExceptionAction); privileged so
        // the accessibility override succeeds under a security manager.
        AccessController.doPrivileged(new PrivilegedExceptionAction<Object>() {
            public Object run() throws Exception {

                if (!notification_method.isAccessible()) {
                    notification_method.setAccessible(true);
                }

                // Invoke the callback on a fresh extension instance.
                notification_method.invoke(extension_class.newInstance());

                return null;
            }
        });

    } catch (Exception ignored) {

        // Deliberately ignored — the callback is optional (see Javadoc).
        // ignored.printStackTrace();
    }
}

From source file:org.apache.hadoop.hdfs.server.namenode.DelegationTokenServlet.java

/**
 * Issues an HDFS delegation token for the authenticated caller and writes
 * it to the response as a serialized {@code TokenStorage}.
 *
 * Responds 403 when the caller cannot be identified/authenticated and 500
 * on any failure while generating or writing the token.
 */
@Override
protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
        throws ServletException, IOException {
    final UserGroupInformation ugi;
    try {
        ugi = getUGI(req, new Configuration());
    } catch (IOException ioe) {
        LOG.info("Request for token received with no authentication from " + req.getRemoteAddr(), ioe);
        resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Unable to identify or authenticate user");
        return;
    }
    LOG.info("Sending token: {" + ugi.getUserName() + "," + req.getRemoteAddr() + "}");
    final ServletContext context = getServletContext();
    final NameNode nn = (NameNode) context.getAttribute("name.node");

    DataOutputStream dos = null;
    try {
        dos = new DataOutputStream(resp.getOutputStream());
        final DataOutputStream dosFinal = dos; // for doAs block
        // Generate and serialize the token inside the caller's security context.
        ugi.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {

                Token<DelegationTokenIdentifier> token = nn
                        .getDelegationToken(new Text(req.getUserPrincipal().getName()));
                // Stamp the token with the namenode's RPC host:port so the
                // client knows which service the token belongs to.
                String s = nn.rpcAddress.getAddress().getHostAddress() + ":" + nn.rpcAddress.getPort();
                token.setService(new Text(s));
                TokenStorage ts = new TokenStorage();
                ts.addToken(new Text(ugi.getShortUserName()), token);
                ts.write(dosFinal);
                dosFinal.close();
                return null;
            }
        });

    } catch (Exception e) {
        LOG.info("Exception while sending token. Re-throwing. ", e);
        // NOTE(review): if the stream was already closed above, the response
        // may be committed and sendError can have no effect — confirm.
        resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
    } finally {
        // Second close after the one inside run() is a harmless no-op on an
        // already-closed stream; this covers the failure paths.
        if (dos != null)
            dos.close();
    }
}