Example usage for org.apache.hadoop.security UserGroupInformation getShortUserName

Introduction

This page collects example usages of the method org.apache.hadoop.security.UserGroupInformation.getShortUserName().

Prototype

public String getShortUserName() 

Document

Get the user's login name; that is, the portion of the full user name up to the first '/' or '@'.
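
A minimal sketch (not drawn from the sources below) contrasting getShortUserName() with getUserName(); the principal shown in the comments is illustrative:

import java.io.IOException;

import org.apache.hadoop.security.UserGroupInformation;

public class ShortUserNameDemo {
    public static void main(String[] args) throws IOException {
        // Resolve the UGI for the current process.
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();

        // For a Kerberos login such as "alice/host1.example.com@EXAMPLE.COM",
        // getUserName() returns the full principal, while getShortUserName()
        // returns only the part before the first '/' or '@', i.e. "alice".
        System.out.println("full name:  " + ugi.getUserName());
        System.out.println("short name: " + ugi.getShortUserName());
    }
}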

Usage

From source file: org.apache.drill.exec.rpc.security.kerberos.KerberosFactory.java

License: Apache License

@Override
public SaslServer createSaslServer(final UserGroupInformation ugi, final Map<String, ?> properties)
        throws SaslException {
    try {
        final String primaryName = ugi.getShortUserName();
        final String instanceName = new HadoopKerberosName(ugi.getUserName()).getHostName();

        final SaslServer saslServer = ugi.doAs(new PrivilegedExceptionAction<SaslServer>() {
            @Override
            public SaslServer run() throws Exception {
                return FastSaslServerFactory.getInstance().createSaslServer(KerberosUtil.KERBEROS_SASL_NAME,
                        primaryName, instanceName, properties, new KerberosServerCallbackHandler());
            }
        });
        logger.trace("GSSAPI SaslServer created.");
        return saslServer;
    } catch (final UndeclaredThrowableException e) {
        final Throwable cause = e.getCause();
        logger.debug("Authentication failed.", cause);
        if (cause instanceof SaslException) {
            throw (SaslException) cause;
        } else {
            throw new SaslException("Unexpected failure trying to authenticate using Kerberos", cause);
        }
    } catch (final IOException | InterruptedException e) {
        logger.debug("Authentication failed.", e);
        throw new SaslException("Unexpected failure trying to authenticate using Kerberos", e);
    }
}
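
The snippet above uses getShortUserName() as the SASL "primary" and derives the "instance" from the host part of the full principal. As a hedged illustration of how those pieces come out of a Kerberos principal (the principal string is made up, and auth_to_local rules must be loaded for the short-name mapping):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.HadoopKerberosName;

public class PrincipalPartsDemo {
    public static void main(String[] args) throws Exception {
        // Load the hadoop.security.auth_to_local mapping rules.
        HadoopKerberosName.setConfiguration(new Configuration());
        HadoopKerberosName name =
                new HadoopKerberosName("drill/node1.example.com@EXAMPLE.COM");
        System.out.println(name.getHostName());   // node1.example.com
        System.out.println(name.getRealm());      // EXAMPLE.COM
        System.out.println(name.getShortName());  // typically "drill" under default rules
    }
}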

From source file: org.apache.drill.exec.server.rest.spnego.TestSpnegoConfig.java

License: Apache License

/**
 * Valid configuration with both keytab and principal.
 * @throws Exception
 */
@Test
public void testValidSpnegoConfig() throws Exception {

    try {
        final DrillConfig newConfig = new DrillConfig(DrillConfig.create()
                .withValue(ExecConstants.USER_AUTHENTICATION_ENABLED, ConfigValueFactory.fromAnyRef(true))
                .withValue(ExecConstants.AUTHENTICATION_MECHANISMS,
                        ConfigValueFactory.fromIterable(Lists.newArrayList("plain")))
                .withValue(ExecConstants.HTTP_SPNEGO_PRINCIPAL,
                        ConfigValueFactory.fromAnyRef(spnegoHelper.SERVER_PRINCIPAL))
                .withValue(ExecConstants.HTTP_SPNEGO_KEYTAB,
                        ConfigValueFactory.fromAnyRef(spnegoHelper.serverKeytab.toString()))
                .withValue(ExecConstants.USER_AUTHENTICATOR_IMPL,
                        ConfigValueFactory.fromAnyRef(UserAuthenticatorTestImpl.TYPE)));

        final SpnegoConfig spnegoConfig = new SpnegoConfig(newConfig);
        spnegoConfig.validateSpnegoConfig();
        UserGroupInformation ugi = spnegoConfig.getLoggedInUgi();
        assertEquals(primaryName, ugi.getShortUserName());
        assertEquals(spnegoHelper.SERVER_PRINCIPAL, ugi.getUserName());
    } catch (Exception ex) {
        fail();
    }
}

From source file: org.apache.drill.exec.store.parquet.metadata.Metadata.java

License: Apache License

/**
 * Get the metadata for a single file.
 */
private ParquetFileMetadata_v3 getParquetFileMetadata_v3(ParquetTableMetadata_v3 parquetTableMetadata,
        final FileStatus file, final FileSystem fs, boolean allColumns, Set<String> columnSet)
        throws IOException, InterruptedException {
    final ParquetMetadata metadata;
    final UserGroupInformation processUserUgi = ImpersonationUtil.getProcessUserUGI();
    final Configuration conf = new Configuration(fs.getConf());
    try {
        metadata = processUserUgi.doAs((PrivilegedExceptionAction<ParquetMetadata>) () -> {
            try (ParquetFileReader parquetFileReader = ParquetFileReader
                    .open(HadoopInputFile.fromStatus(file, conf), readerConfig.toReadOptions())) {
                return parquetFileReader.getFooter();
            }
        });
    } catch (Exception e) {
        logger.error(
                "Exception while reading footer of parquet file [Details - path: {}, owner: {}] as process user {}",
                file.getPath(), file.getOwner(), processUserUgi.getShortUserName(), e);
        throw e;
    }

    MessageType schema = metadata.getFileMetaData().getSchema();

    Map<SchemaPath, ColTypeInfo> colTypeInfoMap = new HashMap<>();
    for (String[] path : schema.getPaths()) {
        colTypeInfoMap.put(SchemaPath.getCompoundPath(path), getColTypeInfo(schema, schema, path, 0));
    }

    List<RowGroupMetadata_v3> rowGroupMetadataList = Lists.newArrayList();

    ArrayList<SchemaPath> ALL_COLS = new ArrayList<>();
    ALL_COLS.add(SchemaPath.STAR_COLUMN);
    ParquetReaderUtility.DateCorruptionStatus containsCorruptDates = ParquetReaderUtility
            .detectCorruptDates(metadata, ALL_COLS, readerConfig.autoCorrectCorruptedDates());
    logger.debug("Contains corrupt dates: {}.", containsCorruptDates);

    for (BlockMetaData rowGroup : metadata.getBlocks()) {
        List<ColumnMetadata_v3> columnMetadataList = new ArrayList<>();
        long length = 0;
        for (ColumnChunkMetaData col : rowGroup.getColumns()) {
            String[] columnName = col.getPath().toArray();
            SchemaPath columnSchemaName = SchemaPath.getCompoundPath(columnName);
            ColTypeInfo colTypeInfo = colTypeInfoMap.get(columnSchemaName);

            ColumnTypeMetadata_v3 columnTypeMetadata = new ColumnTypeMetadata_v3(columnName,
                    col.getPrimitiveType().getPrimitiveTypeName(), colTypeInfo.originalType,
                    colTypeInfo.precision, colTypeInfo.scale, colTypeInfo.repetitionLevel,
                    colTypeInfo.definitionLevel);

            if (parquetTableMetadata.columnTypeInfo == null) {
                parquetTableMetadata.columnTypeInfo = new ConcurrentHashMap<>();
            }
            parquetTableMetadata.columnTypeInfo.put(new ColumnTypeMetadata_v3.Key(columnTypeMetadata.name),
                    columnTypeMetadata);
            // Store column metadata only if allColumns is set to true or if the column belongs to the subset of columns specified in the refresh command
            if (allColumns || columnSet == null || !allColumns && columnSet != null && columnSet.size() > 0
                    && columnSet.contains(columnSchemaName.getRootSegmentPath())) {
                Statistics<?> stats = col.getStatistics();
                // Save the column schema info. We'll merge it into one list
                Object minValue = null;
                Object maxValue = null;
                long numNulls = -1;
                boolean statsAvailable = stats != null && !stats.isEmpty();
                if (statsAvailable) {
                    if (stats.hasNonNullValue()) {
                        minValue = stats.genericGetMin();
                        maxValue = stats.genericGetMax();
                        if (containsCorruptDates == ParquetReaderUtility.DateCorruptionStatus.META_SHOWS_CORRUPTION
                                && columnTypeMetadata.originalType == OriginalType.DATE) {
                            minValue = ParquetReaderUtility.autoCorrectCorruptedDate((Integer) minValue);
                            maxValue = ParquetReaderUtility.autoCorrectCorruptedDate((Integer) maxValue);
                        }
                    }
                    numNulls = stats.getNumNulls();
                }
                ColumnMetadata_v3 columnMetadata = new ColumnMetadata_v3(columnTypeMetadata.name,
                        col.getPrimitiveType().getPrimitiveTypeName(), minValue, maxValue, numNulls);
                columnMetadataList.add(columnMetadata);
            }
            length += col.getTotalSize();
        }

        // DRILL-5009: Skip the RowGroup if it is empty
        // Note we still read the schema even if there are no values in the RowGroup
        if (rowGroup.getRowCount() == 0) {
            continue;
        }
        RowGroupMetadata_v3 rowGroupMeta = new RowGroupMetadata_v3(rowGroup.getStartingPos(), length,
                rowGroup.getRowCount(), getHostAffinity(file, fs, rowGroup.getStartingPos(), length),
                columnMetadataList);

        rowGroupMetadataList.add(rowGroupMeta);
    }
    Path path = Path.getPathWithoutSchemeAndAuthority(file.getPath());

    return new ParquetFileMetadata_v3(path, file.getLen(), rowGroupMetadataList);
}

From source file: org.apache.falcon.hadoop.HadoopClientFactory.java

License: Apache License

/**
 * Return a FileSystem created with the provided user for the specified URI.
 *
 * @param ugi user group information
 * @param uri  file system URI.
 * @param conf Configuration with all necessary information to create the FileSystem.
 * @return FileSystem created with the provided user/group.
 * @throws org.apache.falcon.FalconException
 *          if the filesystem could not be created.
 */
@SuppressWarnings("ResultOfMethodCallIgnored")
public FileSystem createFileSystem(UserGroupInformation ugi, final URI uri, final Configuration conf)
        throws FalconException {
    validateInputs(ugi, uri, conf);

    try {
        // prevent falcon impersonating falcon, no need to use doas
        final String proxyUserName = ugi.getShortUserName();
        if (proxyUserName.equals(UserGroupInformation.getLoginUser().getShortUserName())) {
            LOG.trace("Creating FS for the login user {}, impersonation not required", proxyUserName);
            return FileSystem.get(uri, conf);
        }

        LOG.trace("Creating FS impersonating user {}", proxyUserName);
        return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
            public FileSystem run() throws Exception {
                return FileSystem.get(uri, conf);
            }
        });
    } catch (InterruptedException | IOException ex) {
        throw new FalconException("Exception creating FileSystem:" + ex.getMessage(), ex);
    }
}
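
A hedged usage sketch for the method above: obtain a proxy UGI for the request user and let createFileSystem() decide whether impersonation is needed. The factory instance, user name, and NameNode URI are illustrative, not taken from Falcon's wiring:

import java.net.URI;

import org.apache.falcon.hadoop.HadoopClientFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.UserGroupInformation;

public class CreateFsForProxyUser {
    static FileSystem openAs(HadoopClientFactory factory, String user) throws Exception {
        // Impersonate 'user' on top of the process's login user.
        UserGroupInformation proxyUgi = UserGroupInformation.createProxyUser(user,
                UserGroupInformation.getLoginUser());
        return factory.createFileSystem(proxyUgi, new URI("hdfs://namenode:8020"),
                new Configuration());
    }
}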

From source file: org.apache.falcon.hadoop.HadoopClientFactory.java

License: Apache License

/**
 * Return a DistributedFileSystem created with the provided user for the specified URI.
 *
 * @param ugi user group information
 * @param uri  file system URI.
 * @param conf Configuration with all necessary information to create the FileSystem.
 * @return DistributedFileSystem created with the provided user/group.
 * @throws org.apache.falcon.FalconException
 *          if the filesystem could not be created.
 */
@SuppressWarnings("ResultOfMethodCallIgnored")
public DistributedFileSystem createDistributedFileSystem(UserGroupInformation ugi, final URI uri,
        final Configuration conf) throws FalconException {
    validateInputs(ugi, uri, conf);
    FileSystem returnFs;
    try {
        // prevent falcon impersonating falcon, no need to use doas
        final String proxyUserName = ugi.getShortUserName();
        if (proxyUserName.equals(UserGroupInformation.getLoginUser().getShortUserName())) {
            LOG.info("Creating Distributed FS for the login user {}, impersonation not required",
                    proxyUserName);
            returnFs = DistributedFileSystem.get(uri, conf);
        } else {
            LOG.info("Creating FS impersonating user {}", proxyUserName);
            returnFs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
                public FileSystem run() throws Exception {
                    return DistributedFileSystem.get(uri, conf);
                }
            });
        }

        return (DistributedFileSystem) returnFs;
    } catch (InterruptedException | IOException ex) {
        throw new FalconException("Exception creating FileSystem:" + ex.getMessage(), ex);
    }
}

From source file: org.apache.falcon.hadoop.HadoopClientFactory.java

License: Apache License

private void validateInputs(UserGroupInformation ugi, final URI uri, final Configuration conf)
        throws FalconException {
    Validate.notNull(ugi, "ugi cannot be null");
    Validate.notNull(conf, "configuration cannot be null");

    try {
        if (UserGroupInformation.isSecurityEnabled()) {
            LOG.debug("Revalidating Auth Token with auth method {}",
                    UserGroupInformation.getLoginUser().getAuthenticationMethod().name());
            UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
        }
    } catch (IOException ioe) {
        throw new FalconException(
                "Exception while getting FileSystem. Unable to check TGT for user " + ugi.getShortUserName(),
                ioe);
    }

    validateNameNode(uri, conf);
}

From source file: org.apache.falcon.hive.util.EventUtils.java

License: Apache License

public void setupConnection() throws Exception {
    Class.forName(DRIVER_NAME);
    DriverManager.setLoginTimeout(TIMEOUT_IN_SECS);
    String authTokenString = ";auth=delegationToken";
    //To bypass findbugs check, need to store empty password in Properties.
    Properties password = new Properties();
    password.put("password", "");
    String user = "";

    UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
    if (currentUser != null) {
        user = currentUser.getShortUserName();
    }

    if (conf.get(HiveDRArgs.EXECUTION_STAGE.getName())
            .equalsIgnoreCase(HiveDRUtils.ExecutionStage.EXPORT.name())) {
        String authString = null;
        if (StringUtils.isNotEmpty(conf.get(HiveDRArgs.SOURCE_HIVE2_KERBEROS_PRINCIPAL.getName()))) {
            authString = authTokenString;
        }

        String connString = getSourceHS2ConnectionUrl(authString);
        sourceConnection = DriverManager.getConnection(connString, user, password.getProperty("password"));
        sourceStatement = sourceConnection.createStatement();
    } else {
        String authString = null;
        if (StringUtils.isNotEmpty(conf.get(HiveDRArgs.TARGET_HIVE2_KERBEROS_PRINCIPAL.getName()))) {
            authString = authTokenString;
        }
        String connString = getTargetHS2ConnectionUrl(authString);
        targetConnection = DriverManager.getConnection(connString, user, password.getProperty("password"));
        targetStatement = targetConnection.createStatement();
    }
}

From source file: org.apache.falcon.recipe.RecipeTool.java

License: Apache License

private FileSystem createFileSystem(UserGroupInformation ugi, final URI uri, final Configuration conf)
        throws Exception {
    try {
        final String proxyUserName = ugi.getShortUserName();
        if (proxyUserName.equals(UserGroupInformation.getLoginUser().getShortUserName())) {
            return FileSystem.get(uri, conf);
        }

        return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
            public FileSystem run() throws Exception {
                return FileSystem.get(uri, conf);
            }
        });
    } catch (InterruptedException ex) {
        throw new IOException("Exception creating FileSystem:" + ex.getMessage(), ex);
    }
}

From source file: org.apache.falcon.regression.core.util.HiveUtil.java

License: Apache License

public static Connection getHiveJdbcConnection(final String jdbcUrl, final String user, final String password,
        final String hivePrincipal)
        throws ClassNotFoundException, SQLException, IOException, InterruptedException {
    final String transportMode = new HiveConf().get("hive.server2.transport.mode", "binary");
    String connectionStringSuffix = "";
    if (transportMode.equalsIgnoreCase("http")) {
        connectionStringSuffix += "transportMode=http;httpPath=cliservice;";
    }
    if (MerlinConstants.IS_SECURE) {
        connectionStringSuffix += String.format("principal=%s;kerberosAuthType=fromSubject;", hivePrincipal);
    }
    final String connectionStringSuffix2 = connectionStringSuffix;
    final UserGroupInformation ugi = KerberosHelper.getUGI(user);
    final Connection conn = ugi.doAs(new PrivilegedExceptionAction<Connection>() {
        @Override
        public Connection run() throws Exception {
            Class.forName(DRIVER_NAME);
            return DriverManager.getConnection(jdbcUrl + "/;" + connectionStringSuffix2, ugi.getShortUserName(),
                    password);
        }
    });

    return conn;
}

From source file: org.apache.falcon.security.DefaultAuthorizationProvider.java

License: Apache License

/**
 * Determines if the authenticated user is the user who started this process
 * or belongs to the super user group.
 *
 * @param authenticatedUGI UGI
 * @return true if super user else false.
 */
public boolean isSuperUser(UserGroupInformation authenticatedUGI) {
    return SUPER_USER.equals(authenticatedUGI.getShortUserName())
            || (!StringUtils.isEmpty(superUserGroup) && isUserInGroup(superUserGroup, authenticatedUGI));
}
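
As a hedged example of invoking this check (constructing the provider directly is illustrative; Falcon normally instantiates its authorization provider from configuration):

import java.io.IOException;

import org.apache.falcon.security.DefaultAuthorizationProvider;
import org.apache.hadoop.security.UserGroupInformation;

public class SuperUserGate {
    static void requireSuperUser() throws IOException {
        DefaultAuthorizationProvider provider = new DefaultAuthorizationProvider();
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        if (!provider.isSuperUser(ugi)) {
            throw new SecurityException("User " + ugi.getShortUserName()
                    + " is not a superuser");
        }
    }
}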