Example usage for java.security PrivilegedExceptionAction PrivilegedExceptionAction

List of usage examples for java.security PrivilegedExceptionAction PrivilegedExceptionAction

Introduction

On this page you can find example usage of java.security PrivilegedExceptionAction PrivilegedExceptionAction.

Prototype

PrivilegedExceptionAction

Source Link

Usage

From source file:org.apache.hadoop.hdfs.server.namenode.TestSubtreeLockACL.java

@Test
public void testSubtreeMoveBlockedByInheritedDefaultAcl() throws IOException, InterruptedException {
    try {
        setup();

        // Deny user2 write access below subtree1 via a default (inheritable) ACL entry.
        setReadOnlyUserDefaultAcl(user2.getShortUserName(), subtree1);

        // Open a FileSystem handle inside user2's security context.
        final PrivilegedExceptionAction<FileSystem> openFs = new PrivilegedExceptionAction<FileSystem>() {
            @Override
            public FileSystem run() throws Exception {
                return FileSystem.get(conf);
            }
        };
        final FileSystem user2fs = user2.doAs(openFs);

        try {
            // Renaming level2folder1 into subtree2 must be rejected by the
            // ACL inherited from level1folder1.
            user2fs.rename(level2folder1, new Path(subtree2, "newname"));
            fail("Acl should block move");
        } catch (AccessControlException expected) {
            // The denial must originate at the protected inode.
            assertTrue("Wrong inode triggered access control exception.",
                    expected.getMessage().contains("inode=\"/subtrees/subtree1/level1folder1\""));
        }
    } finally {
        teardown();
    }
}

From source file:org.apache.hadoop.mapred.JobHistoryServer.java

/**
 * Start embedded jetty server to host history servlets/pages
 *  - Push history file system, acl Manager and cluster conf for future
 *    reference by the servlets/pages/*from  w  ww  . jav a  2 s  . c o  m*/
 *
 * @param conf - Cluster configuration
 * @param aclsManager - ACLs Manager for validating user request
 * @throws IOException - Any exception while starting web server
 */
private void initializeWebServer(final JobConf conf, ACLsManager aclsManager) throws IOException {

    this.conf = conf;

    FileSystem fs;
    try {
        fs = aclsManager.getMROwner().doAs(new PrivilegedExceptionAction<FileSystem>() {
            public FileSystem run() throws IOException {
                return FileSystem.get(conf);
            }
        });
    } catch (InterruptedException e) {
        throw new IOException("Operation interrupted", e);
    }

    if (!isEmbedded(conf)) {
        JobHistory.initDone(conf, fs, false);
    }
    final String historyLogDir = JobHistory.getCompletedJobHistoryLocation().toString();
    FileSystem historyFS = new Path(historyLogDir).getFileSystem(conf);

    historyServer.setAttribute("historyLogDir", historyLogDir);
    historyServer.setAttribute("fileSys", historyFS);
    historyServer.setAttribute("jobConf", conf);
    historyServer.setAttribute("aclManager", aclsManager);

    historyServer.addServlet("historyfile", "/historyfile", RawHistoryFileServlet.class);
}

From source file:org.apache.hadoop.gateway.identityasserter.function.UsernameFunctionProcessorTest.java

@Test
public void testResolve() throws Exception {
    final UsernameFunctionProcessor processor = new UsernameFunctionProcessor();

    // With no subject established, the processor passes input through untouched.
    assertThat(processor.resolve(null, null), nullValue());
    assertThat(processor.resolve(null, Arrays.asList("test-input")), contains("test-input"));

    // Build a read-only subject carrying the primary principal.
    final Subject subject = new Subject();
    subject.getPrincipals().add(new PrimaryPrincipal("test-username"));
    subject.setReadOnly();

    // Inside the subject's context the principal name replaces any input.
    final PrivilegedExceptionAction<Object> verifyResolution = new PrivilegedExceptionAction<Object>() {
        @Override
        public Object run() throws Exception {
            assertThat(processor.resolve(null, null), contains("test-username"));
            assertThat(processor.resolve(null, Arrays.asList("test-ignored")), contains("test-username"));
            return null;
        }
    };
    Subject.doAs(subject, verifyResolution);
}

From source file:org.apache.hadoop.hive.metastore.HiveProtoEventsCleanerTask.java

/**
 * Delete the events dir with its owner as proxy user.
 *
 * @param fs file system used when the current process user owns the dir
 * @param eventsDir status of the directory to remove (supplies path + owner)
 * @throws IOException on deletion failure in either branch
 */
private void deleteDirByOwner(FileSystem fs, FileStatus eventsDir) throws IOException {
    String owner = eventsDir.getOwner();
    if (owner.equals(System.getProperty("user.name"))) {
        // Current process user already owns the dir; delete directly.
        fs.delete(eventsDir.getPath(), true);
    } else {
        LOG.info("Deleting " + eventsDir.getPath() + " as user " + owner);
        UserGroupInformation ugi = UserGroupInformation.createProxyUser(owner,
                UserGroupInformation.getLoginUser());
        try {
            ugi.doAs(new PrivilegedExceptionAction<Object>() {
                @Override
                public Object run() throws Exception {
                    // New FileSystem object to be obtained in user context for doAs flow.
                    try (FileSystem doAsFs = FileSystem.newInstance(eventsDir.getPath().toUri(), conf)) {
                        doAsFs.delete(eventsDir.getPath(), true);
                    }
                    return null;
                }
            });
        } catch (InterruptedException ie) {
            // Restore interrupt status; this cleanup is best-effort, so log
            // and continue rather than propagate.
            Thread.currentThread().interrupt();
            LOG.error("Could not delete " + eventsDir.getPath() + " for UGI: " + ugi, ie);
        }
    }
}

From source file:org.apache.hadoop.hdfs.nfs.nfs3.DFSClientCache.java

/**
 * Builds the Guava cache loader that creates one DFSClient per user name,
 * constructing the client inside that user's security context.
 */
private CacheLoader<String, DFSClient> clientLoader() {
    return new CacheLoader<String, DFSClient>() {
        @Override
        public DFSClient load(String userName) throws Exception {
            final UserGroupInformation ugi = getUserGroupInformation(userName,
                    UserGroupInformation.getCurrentUser());

            // Guava requires CacheLoader never returns null; the DFSClient
            // constructor either succeeds or throws.
            final PrivilegedExceptionAction<DFSClient> newClient = new PrivilegedExceptionAction<DFSClient>() {
                @Override
                public DFSClient run() throws IOException {
                    return new DFSClient(NameNode.getAddress(config), config);
                }
            };
            return ugi.doAs(newClient);
        }
    };
}

From source file:org.apache.hadoop.crypto.key.kms.server.KMS.java

/**
 * REST endpoint that creates a new key in the backing KeyProvider from a
 * JSON request body and returns the created key version as 201 Created.
 *
 * Fields read from the body: name (required), cipher, material, length,
 * description, attributes. Access is checked against the CREATE ACL, and
 * additionally against SET_KEY_MATERIAL when the caller supplies material.
 *
 * @param jsonKey parsed JSON request body
 * @return 201 response whose entity is the JSON form of the new key version
 * @throws Exception any failure is logged at debug level and rethrown
 */
@POST
@Path(KMSRESTConstants.KEYS_RESOURCE)
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@SuppressWarnings("unchecked")
public Response createKey(Map jsonKey) throws Exception {
    try {
        LOG.trace("Entering createKey Method.");
        KMSWebApp.getAdminCallsMeter().mark();
        UserGroupInformation user = HttpUserGroupInformation.get();
        final String name = (String) jsonKey.get(KMSRESTConstants.NAME_FIELD);
        KMSClientProvider.checkNotEmpty(name, KMSRESTConstants.NAME_FIELD);
        // Caller must hold the CREATE ACL before any other work happens.
        assertAccess(KMSACLs.Type.CREATE, user, KMSOp.CREATE_KEY, name);
        String cipher = (String) jsonKey.get(KMSRESTConstants.CIPHER_FIELD);
        final String material;
        material = (String) jsonKey.get(KMSRESTConstants.MATERIAL_FIELD);
        int length = (jsonKey.containsKey(KMSRESTConstants.LENGTH_FIELD))
                ? (Integer) jsonKey.get(KMSRESTConstants.LENGTH_FIELD)
                : 0;
        String description = (String) jsonKey.get(KMSRESTConstants.DESCRIPTION_FIELD);
        LOG.debug(
                "Creating key with name {}, cipher being used{}, " + "length of key {}, description of key {}",
                name, cipher, length, description);
        Map<String, String> attributes = (Map<String, String>) jsonKey.get(KMSRESTConstants.ATTRIBUTES_FIELD);
        if (material != null) {
            // Supplying key material directly requires a second, stricter ACL.
            assertAccess(KMSACLs.Type.SET_KEY_MATERIAL, user, KMSOp.CREATE_KEY, name);
        }
        final KeyProvider.Options options = new KeyProvider.Options(KMSWebApp.getConfiguration());
        if (cipher != null) {
            options.setCipher(cipher);
        }
        if (length != 0) {
            options.setBitLength(length);
        }
        options.setDescription(description);
        options.setAttributes(attributes);

        // Create (and persist via flush) in the caller's security context.
        KeyProvider.KeyVersion keyVersion = user.doAs(new PrivilegedExceptionAction<KeyVersion>() {
            @Override
            public KeyVersion run() throws Exception {
                KeyProvider.KeyVersion keyVersion = (material != null)
                        ? provider.createKey(name, Base64.decodeBase64(material), options)
                        : provider.createKey(name, options);
                provider.flush();
                return keyVersion;
            }
        });

        kmsAudit.ok(user, KMSOp.CREATE_KEY, name,
                "UserProvidedMaterial:" + (material != null) + " Description:" + description);

        // Users without the GET ACL must not see raw key material in the response.
        if (!KMSWebApp.getACLs().hasAccess(KMSACLs.Type.GET, user)) {
            keyVersion = removeKeyMaterial(keyVersion);
        }
        Map json = KMSServerJSONUtils.toJSON(keyVersion);
        // Build the Location header from the request URL trimmed back to the
        // keys resource root.
        String requestURL = KMSMDCFilter.getURL();
        int idx = requestURL.lastIndexOf(KMSRESTConstants.KEYS_RESOURCE);
        requestURL = requestURL.substring(0, idx);
        LOG.trace("Exiting createKey Method.");
        return Response.created(getKeyURI(KMSRESTConstants.SERVICE_VERSION, name))
                .type(MediaType.APPLICATION_JSON).header("Location", getKeyURI(requestURL, name)).entity(json)
                .build();
    } catch (Exception e) {
        LOG.debug("Exception in createKey.", e);
        throw e;
    }
}

From source file:org.apache.drill.exec.store.hive.HiveScan.java

/**
 * Computes the Hive input splits while impersonating the query user, so
 * storage-level permissions are evaluated against that user.
 *
 * @throws ExecutionSetupException wrapping any failure during split creation
 */
private void getSplitsWithUGI() throws ExecutionSetupException {
    final UserGroupInformation ugi = ImpersonationUtil.createProxyUgi(getUserName());
    try {
        ugi.doAs(new PrivilegedExceptionAction<Void>() {
            public Void run() throws Exception {
                getSplits();
                return null;
            }
        });
    } catch (final InterruptedException | IOException e) {
        if (e instanceof InterruptedException) {
            // Restore the interrupt status before wrapping in a runtime exception.
            Thread.currentThread().interrupt();
        }
        final String errMsg = String.format("Failed to create input splits: %s", e.getMessage());
        logger.error(errMsg, e);
        throw new DrillRuntimeException(errMsg, e);
    }
}

From source file:org.apache.hadoop.mapred.TestJobACLs.java

/**
 * Submits a sleep job with 1 map task that runs for a long time(2000 sec)
 *
 * @param clusterConf cluster configuration used to build the JobClient
 * @param user the jobOwner
 * @return RunningJob that is started
 * @throws IOException
 * @throws InterruptedException
 */
private RunningJob submitJobAsUser(final JobConf clusterConf, String user)
        throws IOException, InterruptedException {
    UserGroupInformation ugi = UserGroupInformation.createUserForTesting(user, new String[] {});
    // Parameterized action avoids the raw Object action + unchecked cast the
    // previous version used.
    return ugi.doAs(new PrivilegedExceptionAction<RunningJob>() {
        @Override
        public RunningJob run() throws Exception {
            JobClient jobClient = new JobClient(clusterConf);
            SleepJob sleepJob = new SleepJob();
            sleepJob.setConf(clusterConf);
            // 1 map, 0 reduces; map sleeps 2000s in 1000ms chunks.
            JobConf jobConf = sleepJob.setupJobConf(1, 0, 2000, 1000, 1000, 1000);
            return jobClient.submitJob(jobConf);
        }
    });
}

From source file:org.apache.hadoop.hbase.thrift2.TestThriftHBaseServiceHandlerWithLabels.java

/**
 * Registers every visibility label used by the tests, running as superuser.
 */
private static void createLabels() throws IOException, InterruptedException {
    // VisibilityClient.addLabels declares Throwable, which we narrow to
    // IOException so callers only deal with checked IO failures.
    PrivilegedExceptionAction<VisibilityLabelsResponse> addLabelsAction =
            new PrivilegedExceptionAction<VisibilityLabelsResponse>() {
                public VisibilityLabelsResponse run() throws Exception {
                    final String[] labels = { SECRET, CONFIDENTIAL, PRIVATE, PUBLIC, TOPSECRET };
                    try {
                        VisibilityClient.addLabels(conf, labels);
                    } catch (Throwable t) {
                        throw new IOException(t);
                    }
                    return null;
                }
            };
    SUPERUSER.runAs(addLabelsAction);
}

From source file:org.apache.axis2.builder.XFormURLEncodedBuilder.java

/**
 * Extracts form parameters from the URL query string and, when present, from
 * an x-www-form-urlencoded request body, storing each decoded name/value
 * pair into the supplied parameter map.
 *
 * @param parameterMap map receiving the decoded name/value pairs
 * @param query raw query string (may be null or empty)
 * @param queryParamSeparator separator between query pairs (e.g. "&amp;")
 * @param charsetEncoding charset used to read the request body
 * @param inputStream request body stream, or null when there is no body
 * @throws AxisFault on unsupported encodings or I/O failures
 */
protected void extractParametersFromRequest(MultipleEntryHashMap parameterMap, String query,
        String queryParamSeparator, final String charsetEncoding, final InputStream inputStream)
        throws AxisFault {

    if (query != null && !"".equals(query)) {

        String parts[] = query.split(queryParamSeparator);
        for (int i = 0; i < parts.length; i++) {
            int separator = parts[i].indexOf("=");
            if (separator > 0) {
                String value = parts[i].substring(separator + 1);
                try {
                    value = URIEncoderDecoder.decode(value);
                } catch (UnsupportedEncodingException e) {
                    throw AxisFault.makeFault(e);
                }

                parameterMap.put(parts[i].substring(0, separator), value);
            }
        }

    }

    if (inputStream != null) {
        try {
            InputStreamReader inputStreamReader = null;
            try {
                // Wrap the stream under doPrivileged so charset lookup works
                // even with a restrictive security manager installed.
                // Parameterized action removes the unchecked cast.
                inputStreamReader = AccessController
                        .doPrivileged(new PrivilegedExceptionAction<InputStreamReader>() {
                            public InputStreamReader run() throws UnsupportedEncodingException {
                                return new InputStreamReader(inputStream, charsetEncoding);
                            }
                        });
            } catch (PrivilegedActionException e) {
                throw (UnsupportedEncodingException) e.getException();
            }
            BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                String parts[] = line
                        .split(WSDL20DefaultValueHolder.ATTR_WHTTP_QUERY_PARAMETER_SEPARATOR_DEFAULT);
                for (int i = 0; i < parts.length; i++) {
                    int separator = parts[i].indexOf("=");
                    // Guard matches the query-string branch above; previously a
                    // segment without '=' hit substring(0, -1) and threw
                    // StringIndexOutOfBoundsException.
                    if (separator > 0) {
                        String value = parts[i].substring(separator + 1);
                        parameterMap.put(parts[i].substring(0, separator), URIEncoderDecoder.decode(value));
                    }
                }
            }
        } catch (IOException e) {
            throw AxisFault.makeFault(e);
        }
    }
}