Example usage for java.security.PrivilegedExceptionAction


Introduction

This page collects usage examples for java.security.PrivilegedExceptionAction, the interface whose run() method is executed with modified privileges by AccessController.doPrivileged or, in the Hadoop examples below, as a different user via UserGroupInformation.doAs, while still letting checked exceptions propagate.

Prototype

public interface PrivilegedExceptionAction<T> {
    T run() throws Exception;
}
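
A minimal, self-contained sketch of the canonical pattern (the class name and file path are placeholders, not taken from the examples below): run() may throw a checked exception, which doPrivileged wraps in a PrivilegedActionException for the caller to unwrap.

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;

public class PrivilegedOpenExample {
    static FileInputStream openPrivileged(final String path) throws FileNotFoundException {
        try {
            return AccessController.doPrivileged(new PrivilegedExceptionAction<FileInputStream>() {
                @Override
                public FileInputStream run() throws FileNotFoundException {
                    // Executed with this class's privileges rather than the caller's.
                    return new FileInputStream(path);
                }
            });
        } catch (PrivilegedActionException e) {
            // getException() returns the checked exception thrown by run().
            throw (FileNotFoundException) e.getException();
        }
    }
}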

Usage
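
Most of the examples below come from Hadoop and HBase code, where the same interface is passed to UserGroupInformation.doAs(...) to execute work as a particular user rather than with elevated code privileges. A minimal sketch of that pattern, assuming the Hadoop client libraries are on the classpath (the user name "alice" is a placeholder):

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.security.UserGroupInformation;

public class DoAsExample {
    public static void main(String[] args) throws Exception {
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser("alice");
        String name = ugi.doAs(new PrivilegedExceptionAction<String>() {
            @Override
            public String run() throws Exception {
                // Everything inside run() sees "alice" as the current user.
                return UserGroupInformation.getCurrentUser().getUserName();
            }
        });
        System.out.println(name); // prints "alice"
    }
}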

From source file:org.apache.hadoop.hdfs.security.TestClientProtocolWithDelegationToken.java
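This test creates an RPC server secured by a DelegationTokenSecretManager, attaches a delegation token to the current UserGroupInformation, and then calls the NameNode proxy from inside a PrivilegedExceptionAction via doAs.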

@Test
public void testDelegationTokenRpc() throws Exception {
    ClientProtocol mockNN = mock(ClientProtocol.class);
    FSNamesystem mockNameSys = mock(FSNamesystem.class);
    when(mockNN.getProtocolVersion(anyString(), anyLong())).thenReturn(ClientProtocol.versionID);
    DelegationTokenSecretManager sm = new DelegationTokenSecretManager(
            DFSConfigKeys.DFS_NAMENODE_DELEGATION_KEY_UPDATE_INTERVAL_DEFAULT,
            DFSConfigKeys.DFS_NAMENODE_DELEGATION_KEY_UPDATE_INTERVAL_DEFAULT,
            DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_MAX_LIFETIME_DEFAULT, 3600000, mockNameSys);
    sm.startThreads();
    final Server server = RPC.getServer(mockNN, ADDRESS, 0, 5, true, conf, sm);

    server.start();

    final UserGroupInformation current = UserGroupInformation.getCurrentUser();
    final InetSocketAddress addr = NetUtils.getConnectAddress(server);
    String user = current.getUserName();
    Text owner = new Text(user);
    DelegationTokenIdentifier dtId = new DelegationTokenIdentifier(owner, owner, null);
    Token<DelegationTokenIdentifier> token = new Token<DelegationTokenIdentifier>(dtId, sm);
    SecurityUtil.setTokenService(token, addr);
    LOG.info("Service IP address for token is " + token.getService());
    current.addToken(token);
    current.doAs(new PrivilegedExceptionAction<Object>() {
        @Override
        public Object run() throws Exception {
            ClientProtocol proxy = null;
            try {
                proxy = (ClientProtocol) RPC.getProxy(ClientProtocol.class, ClientProtocol.versionID, addr,
                        conf);
                proxy.getStats();
            } finally {
                server.stop();
                if (proxy != null) {
                    RPC.stopProxy(proxy);
                }
            }
            return null;
        }
    });
}

From source file:org.apache.hadoop.hdfs.TestReadWhileWriting.java
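Here a PrivilegedExceptionAction run under a testing UGI creates a new DistributedFileSystem instance as a different user, so the file can be appended to after the original writer's lease expires.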

/**
 * Test reading while writing.
 */
@Test
public void pipeline_02_03() throws Exception {
    final Configuration conf = new HdfsConfiguration();
    conf.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);

    // create cluster
    final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(4).build();
    try {
        //change the lease limits.
        cluster.setLeasePeriod(SOFT_LEASE_LIMIT, HARD_LEASE_LIMIT);

        //wait for the cluster
        cluster.waitActive();
        final FileSystem fs = cluster.getFileSystem();
        final Path p = new Path(DIR, "file1");
        final int half = BLOCK_SIZE / 2;

        //a. On Machine M1, Create file. Write half block of data.
        //   Invoke DFSOutputStream.hflush() on the dfs file handle.
        //   Do not close file yet.
        {
            final FSDataOutputStream out = fs.create(p, true,
                    fs.getConf().getInt(CommonConfigurationKeys.IO_FILE_BUFFER_SIZE_KEY, 4096), (short) 3,
                    BLOCK_SIZE);
            write(out, 0, half);

            //hflush
            ((DFSOutputStream) out.getWrappedStream()).hflush();
        }

        //b. On another machine M2, open file and verify that the half-block
        //   of data can be read successfully.
        checkFile(p, half, conf);
        AppendTestUtil.LOG.info("leasechecker.interruptAndJoin()");
        ((DistributedFileSystem) fs).dfs.getLeaseRenewer().interruptAndJoin();

        //c. On M1, append another half block of data.  Close file on M1.
        {
            //sleep to let the lease expire.
            Thread.sleep(2 * SOFT_LEASE_LIMIT);

            final UserGroupInformation current = UserGroupInformation.getCurrentUser();
            final UserGroupInformation ugi = UserGroupInformation
                    .createUserForTesting(current.getShortUserName() + "x", new String[] { "supergroup" });
            final DistributedFileSystem dfs = ugi.doAs(new PrivilegedExceptionAction<DistributedFileSystem>() {
                @Override
                public DistributedFileSystem run() throws Exception {
                    return (DistributedFileSystem) FileSystem.newInstance(conf);
                }
            });
            final FSDataOutputStream out = append(dfs, p);
            write(out, 0, half);
            out.close();
        }

        //d. On M2, open file and read 1 block of data from it. Close file.
        checkFile(p, 2 * half, conf);
    } finally {
        cluster.shutdown();
    }
}

From source file:org.apache.hadoop.hbase.security.NettyHBaseSaslRpcClientHandler.java
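When a SASL challenge arrives, it is evaluated inside ugi.doAs so the response is computed under the client's login credentials; the SWITCH_TO_SIMPLE_AUTH marker is handled separately.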

@Override
protected void channelRead0(ChannelHandlerContext ctx, ByteBuf msg) throws Exception {
    int len = msg.readInt();
    if (len == SaslUtil.SWITCH_TO_SIMPLE_AUTH) {
        saslRpcClient.dispose();
        if (saslRpcClient.fallbackAllowed) {
            saslPromise.trySuccess(false);
        } else {
            saslPromise.tryFailure(new FallbackDisallowedException());
        }
        return;
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Will read input token of size " + len + " for processing by initSASLContext");
    }
    final byte[] challenge = new byte[len];
    msg.readBytes(challenge);
    byte[] response = ugi.doAs(new PrivilegedExceptionAction<byte[]>() {

        @Override
        public byte[] run() throws Exception {
            return saslRpcClient.evaluateChallenge(challenge);
        }
    });
    if (response != null) {
        writeResponse(ctx, response);
    }
    tryComplete(ctx);
}

From source file:org.apache.axis2.jaxws.server.dispatcher.JavaDispatcher.java
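This helper wraps the context-class-loader lookup in AccessController.doPrivileged and unwraps the checked exception from the resulting PrivilegedActionException.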

/**
 * @return ClassLoader
 */
private static ClassLoader getCurrentContextClassLoader() {
    // NOTE: This method must remain private because it uses AccessController
    ClassLoader cl = null;
    try {
        cl = (ClassLoader) org.apache.axis2.java.security.AccessController
                .doPrivileged(new PrivilegedExceptionAction() {
                    public Object run() throws ClassNotFoundException {
                        return Thread.currentThread().getContextClassLoader();
                    }
                });
    } catch (PrivilegedActionException e) {
        // The privileged method will throw a PrivilegedActionException which
        // contains the actual exception.
        if (log.isDebugEnabled()) {
            log.debug("Exception thrown from AccessController: " + e);
        }
        Exception wrappedE = e.getException();
        if (wrappedE instanceof RuntimeException) {
            throw (RuntimeException) wrappedE;
        } else {
            throw new RuntimeException(wrappedE);
        }
    }

    return cl;
}

From source file:org.apache.hadoop.hbase.mapreduce.TestImportTSVWithVisibilityLabels.java
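The visibility labels are added through VisibilityClient inside a PrivilegedExceptionAction executed by SUPERUSER.runAs.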

private static void createLabels() throws IOException, InterruptedException {
    PrivilegedExceptionAction<VisibilityLabelsResponse> action = new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
        @Override
        public VisibilityLabelsResponse run() throws Exception {
            String[] labels = { SECRET, TOPSECRET, CONFIDENTIAL, PUBLIC, PRIVATE };
            try {
                VisibilityClient.addLabels(conf, labels);
                LOG.info("Added labels ");
            } catch (Throwable t) {
                LOG.error("Error in adding labels", t);
                throw new IOException(t);
            }
            return null;
        }
    };
    SUPERUSER.runAs(action);
}

From source file:org.apache.axis2.deployment.util.Utils.java
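Handler instantiation via reflection is performed inside doPrivileged; any PrivilegedActionException is converted to an AxisFault.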

public static void addFlowHandlers(Flow flow, ClassLoader clsLoader) throws AxisFault {
    int count = flow.getHandlerCount();

    for (int j = 0; j < count; j++) {
        HandlerDescription handlermd = flow.getHandler(j);
        Handler handler;

        final Class handlerClass = getHandlerClass(handlermd.getClassName(), clsLoader);

        try {
            handler = (Handler) org.apache.axis2.java.security.AccessController
                    .doPrivileged(new PrivilegedExceptionAction() {
                        public Object run() throws InstantiationException, IllegalAccessException {
                            return handlerClass.newInstance();
                        }
                    });
            handler.init(handlermd);
            handlermd.setHandler(handler);
        } catch (PrivilegedActionException e) {
            throw AxisFault.makeFault(e);
        }
    }
}

From source file:com.petalmd.armor.service.ArmorService.java
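This constructor uses PrivilegedExceptionAction twice: once to make a private RestController method accessible via reflection, and once to load or generate an AES node key on disk.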

@Inject
public ArmorService(final Settings settings, final RestController restController, final Client client,
        final Authorizator authorizator, final AuthenticationBackend authenticationBackend,
        final HTTPAuthenticator httpAuthenticator, final SessionStore sessionStore,
        final AuditListener auditListener, final SearchService searchService) {
    super(settings);
    this.restController = restController;
    this.client = client;
    this.settings = settings;
    //securityConfigurationIndex = settings
    //        .get(ConfigConstants.ARMOR_CONFIG_INDEX_NAME, ConfigConstants.DEFAULT_SECURITY_CONFIG_INDEX);
    this.authenticationBackend = authenticationBackend;
    this.authorizator = authorizator;
    this.httpAuthenticator = httpAuthenticator;
    this.sessionStore = sessionStore;

    SecurityManager sm = System.getSecurityManager();
    if (sm != null) {
        sm.checkPermission(new SpecialPermission());
    }

    try {
        AccessController.doPrivileged(new PrivilegedExceptionAction<Boolean>() {
            @Override
            public Boolean run() throws Exception {
                method = RestController.class.getDeclaredMethod("getHandler", RestRequest.class);
                method.setAccessible(true);

                return true;
            }
        });
    } catch (final Exception e) {
        log.error(e.toString(), e);
        throw new ElasticsearchException(e.toString());
    }

    final String keyPath = settings.get(ConfigConstants.ARMOR_KEY_PATH, ".");
    //        AccessController.checkPermission(new FilePermission(keyPath+File.separator+"armor_node_key.key", "write"));
    SecretKey sc = null;
    try {
        sc = AccessController.doPrivileged(new PrivilegedExceptionAction<SecretKey>() {
            @Override
            public SecretKey run() throws Exception {
                final File keyFile = new File(keyPath, "armor_node_key.key");
                SecretKey sc = null;
                if (keyFile.exists()) {
                    log.debug("Loaded key from {}", keyFile.getAbsolutePath());
                    sc = new SecretKeySpec(FileUtils.readFileToByteArray(keyFile), "AES");
                } else {
                    final SecureRandom secRandom = SecureRandom.getInstance("SHA1PRNG");
                    final KeyGenerator kg = KeyGenerator.getInstance("AES");
                    kg.init(128, secRandom);
                    final SecretKey secretKey = kg.generateKey();
                    final byte[] enckey = secretKey.getEncoded();

                    if (enckey == null || enckey.length != 16) {
                        throw new Exception("invalid key " + (enckey == null ? -1 : enckey.length));
                    }
                    FileUtils.writeByteArrayToFile(keyFile, enckey);
                    sc = secretKey;
                    log.info("New key written to {}, make sure all nodes have this key",
                            keyFile.getAbsolutePath());
                }
                return sc;
            }
        });
    } catch (final Exception e) {
        log.error("Cannot generate or read secrety key", e);
        throw new ElasticsearchException(e.toString());
    }

    this.auditListener = auditListener;
    //TODO FUTURE index change audit trail

    final boolean checkForRoot = settings.getAsBoolean(ConfigConstants.ARMOR_CHECK_FOR_ROOT, true);

    if (SecurityUtil.isRootUser()) {

        if (checkForRoot) {
            throw new ElasticsearchException(
                    "You're trying to run elasticsearch as root or Windows Administrator and thats forbidden.");
        } else {
            log.warn(
                    "You're trying to run elasticsearch as root or Windows Administrator! Thats a potential security issue.");
        }

    }

    /*final String scriptingStatus = settings.get(ScriptService.DISABLE_DYNAMIC_SCRIPTING_SETTING,
        ScriptService.DISABLE_DYNAMIC_SCRIPTING_DEFAULT);
            
    if (scriptingStatus.equalsIgnoreCase(ScriptService.DISABLE_DYNAMIC_SCRIPTING_DEFAULT)) {
    log.warn("{} has the default value {}, consider setting it to false if not needed",
            ScriptService.DISABLE_DYNAMIC_SCRIPTING_SETTING, scriptingStatus);
    }
            
    if (scriptingStatus.equalsIgnoreCase("true")) {
    log.error("{} is configured insecure, consider setting it to false or " + ScriptService.DISABLE_DYNAMIC_SCRIPTING_DEFAULT,
            ScriptService.DISABLE_DYNAMIC_SCRIPTING_SETTING);
    }*/
    if (searchService == null) {
        throw new RuntimeException("ssnull");
    }

    ArmorService.secretKey = sc;
}

From source file:org.apache.coheigea.bigdata.hdfs.HDFSTest.java
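The test reads and then attempts to append to a file as the remote user "bob", wrapping each filesystem operation in a PrivilegedExceptionAction passed to ugi.doAs.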

@org.junit.Test
public void defaultPermissionsTest() throws Exception {
    FileSystem fileSystem = hdfsCluster.getFileSystem();

    // Write a file
    final Path file = new Path("/tmp/tmpdir/data-file2");
    FSDataOutputStream out = fileSystem.create(file);
    for (int i = 0; i < 1024; ++i) {
        out.write(("data" + i + "\n").getBytes("UTF-8"));
        out.flush();
    }
    out.close();

    // Check status
    // FileStatus status = fileSystem.getFileStatus(file);
    // System.out.println("OWNER: " + status.getOwner());
    // System.out.println("GROUP: " + status.getGroup());
    // System.out.println("PERM: " + status.getPermission().toString());
    // fileSystem.setPermission(file, new FsPermission(FsAction.READ, FsAction.NONE, FsAction.NONE));
    // fileSystem.setOwner(file, "bob", null);

    // Now try to read the file as "bob" - this should be allowed
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("bob");
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);

            FileSystem fs = FileSystem.get(conf);

            // Read the file
            FSDataInputStream in = fs.open(file);
            ByteArrayOutputStream output = new ByteArrayOutputStream();
            IOUtils.copy(in, output);
            String content = new String(output.toByteArray());
            Assert.assertTrue(content.startsWith("data0"));

            fs.close();
            return null;
        }
    });

    // Write to the file as the owner, this should be allowed
    out = fileSystem.append(file);
    out.write(("new data\n").getBytes("UTF-8"));
    out.flush();
    out.close();

    // Now try to write to the file as "bob" - this should not be allowed
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", defaultFs);

            FileSystem fs = FileSystem.get(conf);

            // Write to the file
            try {
                fs.append(file);
                Assert.fail("Failure expected on an incorrect permission");
            } catch (AccessControlException ex) {
                // expected
            }

            fs.close();
            return null;
        }
    });
}

From source file:org.apache.hadoop.hdfs.server.namenode.ImageServlet.java
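doGet serves fsimage and edits files from inside UserGroupInformation.getCurrentUser().doAs(...), so the file transfer runs as the authenticated caller.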

@Override
public void doGet(final HttpServletRequest request, final HttpServletResponse response)
        throws ServletException, IOException {
    try {
        final ServletContext context = getServletContext();
        final FSImage nnImage = NameNodeHttpServer.getFsImageFromContext(context);
        final GetImageParams parsedParams = new GetImageParams(request, response);
        final Configuration conf = (Configuration) context.getAttribute(JspHelper.CURRENT_CONF);
        final NameNodeMetrics metrics = NameNode.getNameNodeMetrics();

        validateRequest(context, conf, request, response, nnImage, parsedParams.getStorageInfoString());

        UserGroupInformation.getCurrentUser().doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
                if (parsedParams.isGetImage()) {
                    long txid = parsedParams.getTxId();
                    File imageFile = null;
                    String errorMessage = "Could not find image";
                    if (parsedParams.shouldFetchLatest()) {
                        imageFile = nnImage.getStorage().getHighestFsImageName();
                    } else {
                        errorMessage += " with txid " + txid;
                        imageFile = nnImage.getStorage().getFsImage(txid,
                                EnumSet.of(NameNodeFile.IMAGE, NameNodeFile.IMAGE_ROLLBACK));
                    }
                    if (imageFile == null) {
                        throw new IOException(errorMessage);
                    }
                    CheckpointFaultInjector.getInstance().beforeGetImageSetsHeaders();
                    long start = monotonicNow();
                    serveFile(imageFile);

                    if (metrics != null) { // Metrics non-null only when used inside name node
                        long elapsed = monotonicNow() - start;
                        metrics.addGetImage(elapsed);
                    }
                } else if (parsedParams.isGetEdit()) {
                    long startTxId = parsedParams.getStartTxId();
                    long endTxId = parsedParams.getEndTxId();

                    File editFile = nnImage.getStorage().findFinalizedEditsFile(startTxId, endTxId);
                    long start = monotonicNow();
                    serveFile(editFile);

                    if (metrics != null) { // Metrics non-null only when used inside name node
                        long elapsed = monotonicNow() - start;
                        metrics.addGetEdit(elapsed);
                    }
                }
                return null;
            }

            private void serveFile(File file) throws IOException {
                FileInputStream fis = new FileInputStream(file);
                try {
                    setVerificationHeadersForGet(response, file);
                    setFileNameHeaders(response, file);
                    if (!file.exists()) {
                        // Potential race where the file was deleted while we were in the
                        // process of setting headers!
                        throw new FileNotFoundException(file.toString());
                        // It's possible the file could be deleted after this point, but
                        // we've already opened the 'fis' stream.
                        // It's also possible length could change, but this would be
                        // detected by the client side as an inaccurate length header.
                    }
                    // send file
                    TransferFsImage.copyFileToStream(response.getOutputStream(), file, fis, getThrottler(conf));
                } finally {
                    IOUtils.closeStream(fis);
                }
            }
        });

    } catch (Throwable t) {
        String errMsg = "GetImage failed. " + StringUtils.stringifyException(t);
        response.sendError(HttpServletResponse.SC_GONE, errMsg);
        throw new IOException(errMsg);
    } finally {
        response.getOutputStream().close();
    }
}