Usage examples for javax.security.sasl.Sasl.QOP
Sasl.QOP is a String constant.
To view the source code for javax.security.sasl.Sasl.QOP,
click the Source link.
From source file:org.apache.hadoop.io.crypto.bee.key.sasl.KeySaslClient.java
/**
 * Builds a DIGEST-MD5 SASL client for the given key token, requiring
 * confidentiality protection ("auth-conf") as the quality of protection.
 *
 * @param keyToken token carrying the user identity used for authentication
 * @throws SaslException if the underlying SASL client cannot be created
 */
public KeySaslClient(KeyToken keyToken) throws SaslException {
    logger.debug("assign key token");
    this.keyToken = keyToken;

    // Only confidentiality-protected exchanges are acceptable on this client.
    Map<String, String> clientProps = new TreeMap<String, String>();
    clientProps.put(Sasl.QOP, "auth-conf");

    saslCli = Sasl.createSaslClient(
            new String[] { "DIGEST-MD5" },
            this.keyToken.getUser(),
            SaslUtil.KEY_SERVICE,
            SaslUtil.KEY_REALM,
            clientProps,
            new ClientCallbackHandler(this.keyToken));
    saslAuthStatus = SaslUtil.SaslAuthStatus.AUTH_PROCESSING;
}
From source file:org.apache.hadoop.io.crypto.bee.key.sasl.KeySaslServer.java
/**
 * Builds a DIGEST-MD5 SASL server for the given session, accepting
 * "auth-conf", "auth-int" or "auth" as the quality of protection,
 * listed in order of preference.
 *
 * @param uuid identifier of the session this server instance belongs to
 * @param keyToken token used to authenticate the peer
 * @throws SaslException if the underlying SASL server cannot be created
 */
public KeySaslServer(UUID uuid, KeyToken keyToken) throws SaslException {
    this.uuid = uuid;
    this.keyToken = keyToken;

    // Prefer confidentiality, but also allow integrity-only or auth-only peers.
    propsServer.put(Sasl.QOP, "auth-conf,auth-int,auth");

    saslSrv = Sasl.createSaslServer(
            "DIGEST-MD5",
            SaslUtil.KEY_SERVICE,
            SaslUtil.KEY_REALM,
            propsServer,
            new ServerCallbackHandler(this.keyToken));
    saslAuthStatus = SaslUtil.SaslAuthStatus.AUTH_PROCESSING;

    // Expiry timestamp: SASL_ENTRY_TTL is added to the current epoch millis.
    this.ttl = System.currentTimeMillis() + SaslUtil.SASL_ENTRY_TTL;
}
From source file:org.apache.hadoop.security.SaslInputStream.java
/**
 * Constructs a SaslInputStream from an InputStream and a SaslServer.
 * <p>
 * Note: if the specified InputStream or SaslServer is null, a
 * NullPointerException may be thrown later when they are used.
 *
 * @param inStream the InputStream to be processed
 * @param saslServer an initialized SaslServer object
 */
public SaslInputStream(InputStream inStream, SaslServer saslServer) {
    this.inStream = new DataInputStream(inStream);
    this.saslServer = saslServer;
    this.saslClient = null;

    // Unwrapping of incoming data is only needed when the negotiated QOP
    // provides integrity or confidentiality, i.e. anything but plain "auth".
    String negotiatedQop = (String) saslServer.getNegotiatedProperty(Sasl.QOP);
    boolean wrapNeeded = false;
    if (negotiatedQop != null && !"auth".equalsIgnoreCase(negotiatedQop)) {
        wrapNeeded = true;
    }
    this.useWrap = wrapNeeded;
}
From source file:org.apache.hadoop.security.SaslInputStream.java
/**
 * Constructs a SaslInputStream from an InputStream and a SaslClient.
 * <p>
 * Note: if the specified InputStream or SaslClient is null, a
 * NullPointerException may be thrown later when they are used.
 *
 * @param inStream the InputStream to be processed
 * @param saslClient an initialized SaslClient object
 */
public SaslInputStream(InputStream inStream, SaslClient saslClient) {
    this.inStream = new DataInputStream(inStream);
    this.saslServer = null;
    this.saslClient = saslClient;

    // Unwrapping of incoming data is only needed when the negotiated QOP
    // provides integrity or confidentiality, i.e. anything but plain "auth".
    String negotiatedQop = (String) saslClient.getNegotiatedProperty(Sasl.QOP);
    boolean wrapNeeded = false;
    if (negotiatedQop != null && !"auth".equalsIgnoreCase(negotiatedQop)) {
        wrapNeeded = true;
    }
    this.useWrap = wrapNeeded;
}
From source file:org.apache.hadoop.security.SaslRpcClient.java
/**
 * Do client side SASL authentication with the server via the given
 * InputStream and OutputStream.
 * <p>
 * Runs the SASL challenge/response exchange to completion. Each message on
 * the wire is a status word (read via {@code readStatus}), a 4-byte token
 * length, then the token bytes.
 *
 * @param inS InputStream to use
 * @param outS OutputStream to use
 * @return true if the connection is set up, or false if the server asked
 *         the client to fall back to simple (unauthenticated) auth
 * @throws IOException if the exchange fails; the SASL client is disposed
 *         before the exception is rethrown
 */
public boolean saslConnect(InputStream inS, OutputStream outS) throws IOException {
    DataInputStream inStream = new DataInputStream(new BufferedInputStream(inS));
    DataOutputStream outStream = new DataOutputStream(new BufferedOutputStream(outS));
    try {
        // Some mechanisms send a first token without having seen a challenge.
        byte[] saslToken = new byte[0];
        if (saslClient.hasInitialResponse())
            saslToken = saslClient.evaluateChallenge(saslToken);
        if (saslToken != null) {
            // Wire format: token length as an int, then the raw token bytes.
            outStream.writeInt(saslToken.length);
            outStream.write(saslToken, 0, saslToken.length);
            outStream.flush();
            if (LOG.isDebugEnabled())
                LOG.debug("Have sent token of size " + saslToken.length + " from initSASLContext.");
        }
        if (!saslClient.isComplete()) {
            readStatus(inStream);
            int len = inStream.readInt();
            // A sentinel length tells the client the server wants simple auth;
            // returning false (not throwing) lets the caller switch modes.
            if (len == SaslRpcServer.SWITCH_TO_SIMPLE_AUTH) {
                if (LOG.isDebugEnabled())
                    LOG.debug("Server asks us to fall back to simple auth.");
                saslClient.dispose();
                return false;
            }
            saslToken = new byte[len];
            if (LOG.isDebugEnabled())
                LOG.debug("Will read input token of size " + saslToken.length
                        + " for processing by initSASLContext");
            inStream.readFully(saslToken);
        }
        // Alternate evaluate-challenge / send / receive until the mechanism
        // reports completion; the last step may not require reading a reply.
        while (!saslClient.isComplete()) {
            saslToken = saslClient.evaluateChallenge(saslToken);
            if (saslToken != null) {
                if (LOG.isDebugEnabled())
                    LOG.debug("Will send token of size " + saslToken.length + " from initSASLContext.");
                outStream.writeInt(saslToken.length);
                outStream.write(saslToken, 0, saslToken.length);
                outStream.flush();
            }
            if (!saslClient.isComplete()) {
                readStatus(inStream);
                saslToken = new byte[inStream.readInt()];
                if (LOG.isDebugEnabled())
                    LOG.debug("Will read input token of size " + saslToken.length
                            + " for processing by initSASLContext");
                inStream.readFully(saslToken);
            }
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("SASL client context established. Negotiated QoP: "
                    + saslClient.getNegotiatedProperty(Sasl.QOP));
        }
        return true;
    } catch (IOException e) {
        // Best-effort cleanup: dispose the SASL context but always rethrow
        // the original I/O failure.
        try {
            saslClient.dispose();
        } catch (SaslException ignored) {
            // ignore further exceptions during cleanup
        }
        throw e;
    }
}
From source file:org.apache.hadoop.security.SaslRpcServer.java
public static void init(Configuration conf) { QualityOfProtection saslQOP = QualityOfProtection.AUTHENTICATION; String rpcProtection = conf.get("hadoop.rpc.protection", QualityOfProtection.AUTHENTICATION.name().toLowerCase()); if (QualityOfProtection.INTEGRITY.name().toLowerCase().equals(rpcProtection)) { saslQOP = QualityOfProtection.INTEGRITY; } else if (QualityOfProtection.PRIVACY.name().toLowerCase().equals(rpcProtection)) { saslQOP = QualityOfProtection.PRIVACY; }//from w w w .j a v a 2s.c o m SASL_PROPS.put(Sasl.QOP, saslQOP.getSaslQop()); SASL_PROPS.put(Sasl.SERVER_AUTH, "true"); }
From source file:org.apache.hadoop.security.WhitelistBasedResolver.java
/**
 * Builds the SASL properties offered to clients that are not on the
 * whitelist, translating each configured protection level into its SASL
 * QOP token and requiring server authentication.
 *
 * @param conf configuration to read the protection levels from
 * @return SASL properties map with QOP and SERVER_AUTH set
 */
static Map<String, String> getSaslProperties(Configuration conf) {
    // Default to the strongest protection (privacy) when nothing is configured.
    String[] protections = conf.getStrings(HADOOP_RPC_PROTECTION_NON_WHITELIST,
            QualityOfProtection.PRIVACY.toString());

    // Replace each QualityOfProtection name with its SASL QOP token, in place.
    for (int idx = 0; idx < protections.length; idx++) {
        protections[idx] =
                QualityOfProtection.valueOf(StringUtils.toUpperCase(protections[idx])).getSaslQop();
    }

    Map<String, String> props = new TreeMap<String, String>();
    props.put(Sasl.QOP, StringUtils.join(",", protections));
    props.put(Sasl.SERVER_AUTH, "true");
    return props;
}
From source file:org.apache.hive.jdbc.HiveConnection.java
/**
 * Create transport per the connection options.
 * Supported transport options are:
 *   - SASL based transports over
 *     + Kerberos
 *     + Delegation token
 *     + SSL
 *     + non-SSL
 *   - Raw (non-SASL) socket
 *
 * Kerberos and Delegation token support SASL QOP configurations.
 *
 * @throws SQLException on invalid QOP values or SASL failures
 * @throws TTransportException if the underlying socket transport fails
 */
private TTransport createBinaryTransport() throws SQLException, TTransportException {
    try {
        TTransport socketTransport = createUnderlyingTransport();
        // handle secure connection if specified
        if (!JdbcConnectionParams.AUTH_SIMPLE.equals(sessConfMap.get(JdbcConnectionParams.AUTH_TYPE))) {
            // If Kerberos
            Map<String, String> saslProps = new HashMap<String, String>();
            SaslQOP saslQOP = SaslQOP.AUTH;
            if (sessConfMap.containsKey(JdbcConnectionParams.AUTH_QOP)) {
                try {
                    saslQOP = SaslQOP.fromString(sessConfMap.get(JdbcConnectionParams.AUTH_QOP));
                } catch (IllegalArgumentException e) {
                    // Reject an unrecognized QOP value up-front with SQLSTATE 42000.
                    throw new SQLException(
                            "Invalid " + JdbcConnectionParams.AUTH_QOP + " parameter. " + e.getMessage(),
                            "42000", e);
                }
                saslProps.put(Sasl.QOP, saslQOP.toString());
            } else {
                // If the client did not specify qop then just negotiate the one
                // supported by server
                saslProps.put(Sasl.QOP, "auth-conf,auth-int,auth");
            }
            saslProps.put(Sasl.SERVER_AUTH, "true");
            if (sessConfMap.containsKey(JdbcConnectionParams.AUTH_PRINCIPAL)) {
                // Kerberos principal supplied: use a Kerberos SASL transport.
                transport = KerberosSaslHelper.getKerberosTransport(
                        sessConfMap.get(JdbcConnectionParams.AUTH_PRINCIPAL), host, socketTransport,
                        saslProps, assumeSubject);
            } else {
                // If there's a delegation token available then use token based
                // connection
                String tokenStr = getClientDelegationToken(sessConfMap);
                if (tokenStr != null) {
                    transport = KerberosSaslHelper.getTokenTransport(tokenStr, host, socketTransport,
                            saslProps);
                } else {
                    // we are using PLAIN Sasl connection with user/password
                    String userName = getUserName();
                    String passwd = getPassword();
                    // Overlay the SASL transport on top of the base socket
                    // transport (SSL or non-SSL)
                    transport = PlainSaslHelper.getPlainTransport(userName, passwd, socketTransport);
                }
            }
        } else {
            // Raw socket connection (non-sasl)
            transport = socketTransport;
        }
    } catch (SaslException e) {
        // NOTE(review): the SQLSTATE below has a leading space (" 08S01") —
        // looks like a typo, but it is preserved here as callers may match it.
        throw new SQLException(
                "Could not create secure connection to " + jdbcUriString + ": " + e.getMessage(),
                " 08S01", e);
    }
    return transport;
}
From source file:org.apache.hive.service.auth.HiveAuthFactory.java
/**
 * Returns the SASL properties for the HiveServer2 Thrift endpoint: the QOP
 * configured via HIVE_SERVER2_THRIFT_SASL_QOP, with server authentication
 * always required.
 *
 * @return mutable map containing the SASL QOP and SERVER_AUTH entries
 */
public Map<String, String> getSaslProperties() {
    SaslQOP configuredQop = SaslQOP.fromString(conf.getVar(ConfVars.HIVE_SERVER2_THRIFT_SASL_QOP));

    Map<String, String> props = new HashMap<String, String>();
    props.put(Sasl.SERVER_AUTH, "true");
    props.put(Sasl.QOP, configuredQop.toString());
    return props;
}
From source file:org.apache.hive.spark.client.rpc.RpcConfiguration.java
/**
 * SASL options are namespaced under "hive.spark.client.rpc.sasl.*"; each
 * option is the lower-case version of the constant in the
 * "javax.security.sasl.Sasl" class (e.g. "strength" for cipher strength).
 * Only options actually present in the configuration are returned.
 */
Map<String, String> getSaslOptions() {
    // Sasl property key -> config-key suffix under RPC_SASL_OPT_PREFIX.
    Map<String, String> keyToSuffix = ImmutableMap.<String, String>builder()
            .put(Sasl.CREDENTIALS, "credentials")
            .put(Sasl.MAX_BUFFER, "max_buffer")
            .put(Sasl.POLICY_FORWARD_SECRECY, "policy_forward_secrecy")
            .put(Sasl.POLICY_NOACTIVE, "policy_noactive")
            .put(Sasl.POLICY_NOANONYMOUS, "policy_noanonymous")
            .put(Sasl.POLICY_NODICTIONARY, "policy_nodictionary")
            .put(Sasl.POLICY_NOPLAINTEXT, "policy_noplaintext")
            .put(Sasl.POLICY_PASS_CREDENTIALS, "policy_pass_credentials")
            .put(Sasl.QOP, "qop")
            .put(Sasl.RAW_SEND_SIZE, "raw_send_size")
            .put(Sasl.REUSE, "reuse")
            .put(Sasl.SERVER_AUTH, "server_auth")
            .put(Sasl.STRENGTH, "strength")
            .build();

    Map<String, String> options = new HashMap<String, String>();
    for (Map.Entry<String, String> mapping : keyToSuffix.entrySet()) {
        String configured = config.get(RPC_SASL_OPT_PREFIX + mapping.getValue());
        if (configured != null) {
            options.put(mapping.getKey(), configured);
        }
    }
    return options;
}