Example usage for java.security.PrivilegedExceptionAction

Introduction

This page collects example usages of java.security.PrivilegedExceptionAction, drawn from open-source projects.

Prototype

public interface PrivilegedExceptionAction<T> {
    T run() throws Exception;
}
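
Before the project examples below, here is a minimal, self-contained sketch of the usual pattern (the PrivilegedOpenSketch class and the file-opening target are illustrative only, not taken from any of the sources quoted on this page): run() declares a checked exception, AccessController.doPrivileged executes the action, and the checked exception is recovered from the PrivilegedActionException wrapper.

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;

public class PrivilegedOpenSketch {

    public static FileInputStream openPrivileged(final String path) throws FileNotFoundException {
        try {
            // run() may throw a checked exception; doPrivileged wraps it
            // in a PrivilegedActionException for the caller to unwrap.
            return AccessController.doPrivileged(new PrivilegedExceptionAction<FileInputStream>() {
                public FileInputStream run() throws FileNotFoundException {
                    return new FileInputStream(path);
                }
            });
        } catch (PrivilegedActionException e) {
            // getException() returns the checked exception thrown by run();
            // unchecked exceptions propagate out of doPrivileged directly.
            throw (FileNotFoundException) e.getException();
        }
    }
}

Most of the examples that follow use the same shape, except that Hadoop's UserGroupInformation.doAs or the JAAS Subject.doAs takes the place of AccessController.doPrivileged as the executing context.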

Usage

From source file:org.apache.hadoop.fs.FileSystem.java
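
This Hadoop factory method resolves a UserGroupInformation for the requested user (falling back to the current user when none is given) and performs the filesystem lookup inside ugi.doAs(...), so the returned FileSystem is bound to that user.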

public static FileSystem get(final URI uri, final Configuration conf, final String user)
        throws IOException, InterruptedException {
    UserGroupInformation ugi;
    if (user == null) {
        ugi = UserGroupInformation.getCurrentUser();
    } else {
        ugi = UserGroupInformation.createRemoteUser(user);
    }
    return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
        public FileSystem run() throws IOException {
            return get(uri, conf);
        }
    });
}

From source file:org.apache.hadoop.hbase.security.token.TokenUtil.java
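
Here HBase obtains a delegation token under the given user's identity; the catch chain re-throws the expected exception types as-is and wraps anything unexpected in an UndeclaredThrowableException.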

/**
 * Obtain an authentication token on behalf of the given user and add it to
 * the credentials for the given map reduce job.
 * @param conf The configuration for connecting to the cluster
 * @param user The user for whom to obtain the token
 * @param job The job instance in which the token should be stored
 * @throws IOException If making a remote call to the {@link TokenProvider} fails
 * @throws InterruptedException If executing as the given user is interrupted
 */
public static void obtainTokenForJob(final Configuration conf, UserGroupInformation user, Job job)
        throws IOException, InterruptedException {
    try {
        Token<AuthenticationTokenIdentifier> token = user
                .doAs(new PrivilegedExceptionAction<Token<AuthenticationTokenIdentifier>>() {
                    public Token<AuthenticationTokenIdentifier> run() throws Exception {
                        return obtainToken(conf);
                    }
                });

        if (token == null) {
            throw new IOException("No token returned for user " + user.getUserName());
        }
        Text clusterId = getClusterId(token);
        LOG.info("Obtained token " + token.getKind().toString() + " for user " + user.getUserName()
                + " on cluster " + clusterId.toString());
        job.getCredentials().addToken(clusterId, token);
    } catch (IOException ioe) {
        throw ioe;
    } catch (InterruptedException ie) {
        throw ie;
    } catch (RuntimeException re) {
        throw re;
    } catch (Exception e) {
        throw new UndeclaredThrowableException(e,
                "Unexpected exception obtaining token for user " + user.getUserName());
    }
}

From source file:org.apache.coheigea.bigdata.solr.ranger.SolrAuthorizationMockTest.java
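
This Solr authorization test helper optionally runs the plugin.authorize(...) call inside a test UserGroupInformation, so the plugin under test evaluates the request as the supplied user and group.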

private void performTest(final int expectedStatus, String user, String group, RequestType requestType,
        String ipAddress) throws Exception {
    Map<String, Object> requestParameters = new HashMap<>();
    requestParameters.put("userPrincipal", user);
    requestParameters.put("collectionRequests", "docs");
    requestParameters.put("requestType", requestType);
    if (ipAddress != null) {
        requestParameters.put("ipAddress", ipAddress);
    }

    final AuthorizationContext context = new MockAuthorizationContext(requestParameters);

    if (group != null) {
        UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] { group });
        ugi.doAs(new PrivilegedExceptionAction<Void>() {
            public Void run() throws Exception {
                AuthorizationResponse authResp = plugin.authorize(context);
                Assert.assertEquals(expectedStatus, authResp.statusCode);
                return null;
            }
        });
    } else {
        AuthorizationResponse authResp = plugin.authorize(context);
        Assert.assertEquals(expectedStatus, authResp.statusCode);
    }
}

From source file:org.apache.axis2.jaxws.description.builder.converter.JavaClassToDBCConverter.java
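
This Axis2 converter loads the SEI class inside doPrivileged; because run() declares ClassNotFoundException, the checked exception is recovered from the PrivilegedActionException wrapper and handled as usual.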

/**
 * The only method we will expose to users of this class. It will trigger the creation of the
 * <code>DescriptionBuilderComposite</code> based on our service class. It will also handle the
 * case of an impl class that references an SEI.
 *
 * @return - <code>DescriptionBuilderComposite</code>
 */
public HashMap<String, DescriptionBuilderComposite> produceDBC() {
    if (log.isDebugEnabled()) {
        log.debug("Creating DescriptionBuilderComposite map from Java Class.");
    }

    HashMap<String, DescriptionBuilderComposite> dbcMap = new HashMap<String, DescriptionBuilderComposite>();
    for (int i = 0; i < classes.size(); i++) {
        buildDBC(dbcMap, classes.get(i));
        if (seiClassName != null && !seiClassName.equals("")) {
            try {
                final ClassLoader contextClassLoader = (ClassLoader) AccessController
                        .doPrivileged(new PrivilegedAction() {
                            public Object run() {
                                return Thread.currentThread().getContextClassLoader();
                            }
                        });
                Class seiClass = null;
                try {
                    seiClass = (Class) AccessController.doPrivileged(new PrivilegedExceptionAction() {
                        public Object run() throws ClassNotFoundException {
                            return contextClassLoader.loadClass(seiClassName);
                        }
                    });
                } catch (PrivilegedActionException e) {
                    throw (ClassNotFoundException) e.getException();
                }
                buildDBC(dbcMap, seiClass);

                // Also try to see if the SEI has any super interfaces  
                Class[] interfaces = seiClass.getInterfaces();
                for (int j = 0; j < interfaces.length; j++) {
                    buildDBC(dbcMap, interfaces[j]);
                }
            } catch (ClassNotFoundException e) {
                if (log.isDebugEnabled()) {
                    log.debug("Class not found exception caught for class: " + seiClassName, e);
                }
            }
        }
    }
    return dbcMap;
}

From source file:org.apache.hadoop.hdfs.server.namenode.TestSubtreeLockACL.java
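
This HDFS test obtains a FileSystem handle as a second user via doAs and verifies that a read-only access ACL on the source directory blocks the subtree move.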

@Test
public void testSubtreeMoveBlockedSourceParentAccessAcl() throws IOException, InterruptedException {

    try {
        setup();

        //Make src readonly via access acl
        setReadOnlyUserAccessAcl(user2.getShortUserName(), subtree1);

        //Try to move subtree1 under subtree2. Should fail because of access acl.
        FileSystem user2fs = user2.doAs(new PrivilegedExceptionAction<FileSystem>() {
            @Override
            public FileSystem run() throws Exception {
                return FileSystem.get(conf);
            }
        });

        try {
            user2fs.rename(level1folder1, new Path(subtree2, "newname"));
            fail("Acl should block move");
        } catch (AccessControlException expected) {
            assertTrue("Wrong inode triggered access control exception.",
                    expected.getMessage().contains("inode=\"/subtrees/subtree1\""));
            //Operation should fail.
        }

    } finally {
        teardown();
    }
}

From source file:com.mellanox.r4h.TestReadWhileWriting.java
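
This test writes half a block, lets the soft lease limit expire, then appends the second half as a freshly created test user, using doAs to obtain a DistributedFileSystem bound to that user.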

/** Test reading while writing. */
@Test
public void pipeline_02_03() throws Exception {
    final Configuration conf = new HdfsConfiguration();
    conf.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);

    // create cluster
    final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(4).build();
    try {
        //change the lease limits.
        cluster.setLeasePeriod(SOFT_LEASE_LIMIT, HARD_LEASE_LIMIT);

        //wait for the cluster
        cluster.waitActive();
        final FileSystem fs = cluster.getFileSystem();
        final Path p = new Path(DIR, "file1");
        final int half = BLOCK_SIZE / 2;

        //a. On Machine M1, Create file. Write half block of data.
        //   Invoke DFSOutputStream.hflush() on the dfs file handle.
        //   Do not close file yet.
        {
            final FSDataOutputStream out = fs.create(p, true,
                    fs.getConf().getInt(CommonConfigurationKeys.IO_FILE_BUFFER_SIZE_KEY, 4096), (short) 3,
                    BLOCK_SIZE);
            write(out, 0, half);

            //hflush
            ((DFSOutputStream) out.getWrappedStream()).hflush();
        }

        //b. On another machine M2, open file and verify that the half-block
        //   of data can be read successfully.
        checkFile(p, half, conf);
        MiniDFSClusterBridge.getAppendTestUtilLOG().info("leasechecker.interruptAndJoin()");
        ((DistributedFileSystem) fs).dfs.getLeaseRenewer().interruptAndJoin();

        //c. On M1, append another half block of data.  Close file on M1.
        {
            //sleep to let the lease expire.
            Thread.sleep(2 * SOFT_LEASE_LIMIT);

            final UserGroupInformation current = UserGroupInformation.getCurrentUser();
            final UserGroupInformation ugi = UserGroupInformation
                    .createUserForTesting(current.getShortUserName() + "x", new String[] { "supergroup" });
            final DistributedFileSystem dfs = ugi.doAs(new PrivilegedExceptionAction<DistributedFileSystem>() {
                @Override
                public DistributedFileSystem run() throws Exception {
                    return (DistributedFileSystem) FileSystem.newInstance(conf);
                }
            });
            final FSDataOutputStream out = append(dfs, p);
            write(out, 0, half);
            out.close();
        }

        //d. On M2, open file and read 1 block of data from it. Close file.
        checkFile(p, 2 * half, conf);
    } finally {
        cluster.shutdown();
    }
}

From source file:org.apache.hadoop.hive.metastore.TUGIBasedProcessor.java
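
This Hive metastore processor dispatches each RPC under the client's UGI when the transport carries one; because run() here declares no checked exceptions, Thrift's TException is tunneled out as a RuntimeException and unwrapped after doAs returns.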

@SuppressWarnings("unchecked")
@Override
public boolean process(final TProtocol in, final TProtocol out) throws TException {
    setIpAddress(in);

    final TMessage msg = in.readMessageBegin();
    final ProcessFunction<Iface, ? extends TBase> fn = functions.get(msg.name);
    if (fn == null) {
        TProtocolUtil.skip(in, TType.STRUCT);
        in.readMessageEnd();
        TApplicationException x = new TApplicationException(TApplicationException.UNKNOWN_METHOD,
                "Invalid method name: '" + msg.name + "'");
        out.writeMessageBegin(new TMessage(msg.name, TMessageType.EXCEPTION, msg.seqid));
        x.write(out);
        out.writeMessageEnd();
        out.getTransport().flush();
        return true;
    }
    TUGIContainingTransport ugiTrans = (TUGIContainingTransport) in.getTransport();
    // Store ugi in transport if the rpc is set_ugi
    if (msg.name.equalsIgnoreCase("set_ugi")) {
        try {
            handleSetUGI(ugiTrans, (set_ugi<Iface>) fn, msg, in, out);
        } catch (TException e) {
            throw e;
        } catch (Exception e) {
            throw new TException(e.getCause());
        }
        return true;
    }
    UserGroupInformation clientUgi = ugiTrans.getClientUGI();
    if (null == clientUgi) {
        // At this point, the transport must contain the client ugi; if it doesn't, it's an old client.
        fn.process(msg.seqid, in, out, iface);
        return true;
    } else { // Found ugi, perform doAs().
        PrivilegedExceptionAction<Void> pvea = new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() {
                try {
                    fn.process(msg.seqid, in, out, iface);
                    return null;
                } catch (TException te) {
                    throw new RuntimeException(te);
                }
            }
        };
        try {
            clientUgi.doAs(pvea);
            return true;
        } catch (RuntimeException rte) {
            if (rte.getCause() instanceof TException) {
                throw (TException) rte.getCause();
            }
            throw rte;
        } catch (InterruptedException ie) {
            throw new RuntimeException(ie); // unexpected!
        } catch (IOException ioe) {
            throw new RuntimeException(ioe); // unexpected!
        } finally {
            try {
                FileSystem.closeAllForUGI(clientUgi);
            } catch (IOException e) {
                LOG.error("Could not clean up file-system handles for UGI: " + clientUgi, e);
            }
        }
    }
}

From source file:org.apache.coheigea.bigdata.hdfs.ranger.HDFSRangerTest.java
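
This Ranger/HDFS test reads the same file as several test users via doAs, asserting that the authorizer permits "bob" and "alice" and rejects "eve" and "dave" according to the configured policy.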

@org.junit.Test
public void readTest() throws Exception {
    FileSystem fileSystem = hdfsCluster.getFileSystem();

    // Write a file - the AccessControlEnforcer won't be invoked as we are the "superuser"
    final Path file = new Path("/tmp/tmpdir/data-file2");
    FSDataOutputStream out = fileSystem.create(file);
    for (int i = 0; i < 1024; ++i) {
        out.write(("data" + i + "\n").getBytes("UTF-8"));
        out.flush();
    }
    out.close();

    // Change permissions to read-only
    fileSystem.setPermission(file, new FsPermission(FsAction.READ, FsAction.NONE, FsAction.NONE));

    // Now try to read the file as "bob" - this should be allowed (by the policy - user)
    UserGroupInformation ugi = UserGroupInformation.createUserForTesting("bob", new String[] {});
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);

            FileSystem fs = FileSystem.get(conf);

            // Read the file
            FSDataInputStream in = fs.open(file);
            ByteArrayOutputStream output = new ByteArrayOutputStream();
            IOUtils.copy(in, output);
            String content = new String(output.toByteArray());
            Assert.assertTrue(content.startsWith("data0"));

            fs.close();
            return null;
        }
    });

    // Now try to read the file as "alice" - this should be allowed (by the policy - group)
    ugi = UserGroupInformation.createUserForTesting("alice", new String[] { "IT" });
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);

            FileSystem fs = FileSystem.get(conf);

            // Read the file
            FSDataInputStream in = fs.open(file);
            ByteArrayOutputStream output = new ByteArrayOutputStream();
            IOUtils.copy(in, output);
            String content = new String(output.toByteArray());
            Assert.assertTrue(content.startsWith("data0"));

            fs.close();
            return null;
        }
    });

    // Now try to read the file as unknown user "eve" - this should not be allowed
    ugi = UserGroupInformation.createUserForTesting("eve", new String[] {});
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);

            FileSystem fs = FileSystem.get(conf);

            // Read the file
            try {
                fs.open(file);
                Assert.fail("Failure expected on an incorrect permission");
            } catch (RemoteException ex) {
                // expected
                Assert.assertTrue(RangerAccessControlException.class.getName().equals(ex.getClassName()));
            }

            fs.close();
            return null;
        }
    });

    // Now try to read the file as known user "dave" - this should not be allowed, as he doesn't have the correct permissions
    ugi = UserGroupInformation.createUserForTesting("dave", new String[] {});
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);

            FileSystem fs = FileSystem.get(conf);

            // Read the file
            try {
                fs.open(file);
                Assert.fail("Failure expected on an incorrect permission");
            } catch (RemoteException ex) {
                // expected
                Assert.assertTrue(RangerAccessControlException.class.getName().equals(ex.getClassName()));
            }

            fs.close();
            return null;
        }
    });
}

From source file:org.apache.hadoop.gateway.provider.federation.jwt.filter.JWTFederationFilter.java
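
This Knox filter continues the servlet filter chain inside Subject.doAs so that downstream processing runs with the authenticated subject; IOException and ServletException are unwrapped from the PrivilegedActionException before being re-thrown.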

private void continueWithEstablishedSecurityContext(Subject subject, final HttpServletRequest request,
        final HttpServletResponse response, final FilterChain chain) throws IOException, ServletException {
    try {
        Subject.doAs(subject, new PrivilegedExceptionAction<Object>() {
            @Override
            public Object run() throws Exception {
                chain.doFilter(request, response);
                return null;
            }
        });
    } catch (PrivilegedActionException e) {
        Throwable t = e.getCause();
        if (t instanceof IOException) {
            throw (IOException) t;
        } else if (t instanceof ServletException) {
            throw (ServletException) t;
        } else {
            throw new ServletException(t);
        }
    }
}

From source file:io.druid.security.kerberos.DruidKerberosAuthenticationHandler.java
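
This Druid handler logs in every matching SPNEGO principal from the keytab and then instantiates the GSSManager inside Subject.doAs, so GSS operations run under the server subject's Kerberos credentials.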

@Override
public void init(Properties config) throws ServletException {
    try {
        String principal = config.getProperty(PRINCIPAL);
        if (principal == null || principal.trim().length() == 0) {
            throw new ServletException("Principal not defined in configuration");
        }
        keytab = config.getProperty(KEYTAB, keytab);
        if (keytab == null || keytab.trim().length() == 0) {
            throw new ServletException("Keytab not defined in configuration");
        }
        if (!new File(keytab).exists()) {
            throw new ServletException("Keytab does not exist: " + keytab);
        }

        // use all SPNEGO principals in the keytab if a principal isn't
        // specifically configured
        final String[] spnegoPrincipals;
        if (principal.equals("*")) {
            spnegoPrincipals = KerberosUtil.getPrincipalNames(keytab, Pattern.compile("HTTP/.*"));
            if (spnegoPrincipals.length == 0) {
                throw new ServletException("Principals do not exist in the keytab");
            }
        } else {
            spnegoPrincipals = new String[] { principal };
        }

        String nameRules = config.getProperty(NAME_RULES, null);
        if (nameRules != null) {
            KerberosName.setRules(nameRules);
        }

        for (String spnegoPrincipal : spnegoPrincipals) {
            log.info("Login using keytab %s, for principal %s", keytab, spnegoPrincipal);
            final KerberosAuthenticator.DruidKerberosConfiguration kerberosConfiguration = new KerberosAuthenticator.DruidKerberosConfiguration(
                    keytab, spnegoPrincipal);
            final LoginContext loginContext = new LoginContext("", serverSubject, null, kerberosConfiguration);
            try {
                loginContext.login();
            } catch (LoginException le) {
                log.warn(le, "Failed to login as [%s]", spnegoPrincipal);
                throw new AuthenticationException(le);
            }
            loginContexts.add(loginContext);
        }
        try {
            gssManager = Subject.doAs(serverSubject, new PrivilegedExceptionAction<GSSManager>() {

                @Override
                public GSSManager run() throws Exception {
                    return GSSManager.getInstance();
                }
            });
        } catch (PrivilegedActionException ex) {
            throw ex.getException();
        }
    } catch (Exception ex) {
        throw new ServletException(ex);
    }
}