Example usage for org.apache.commons.compress.utils CharsetNames UTF_8

List of usage examples for org.apache.commons.compress.utils CharsetNames UTF_8

Introduction

On this page you can find example usage for org.apache.commons.compress.utils CharsetNames UTF_8.

Prototype

String UTF_8

To view the source code for org.apache.commons.compress.utils CharsetNames UTF_8, click the Source Link.

Document

Eight-bit Unicode Transformation Format.

Usage

From source file: org.apache.hadoop.hive.ql.exec.spark.HiveSparkClientFactory.java

/**
 * Builds the Spark configuration map used to launch a Hive-on-Spark client.
 * <p>
 * Properties are layered in increasing precedence: built-in defaults, then
 * spark-defaults.conf found on the classpath, then the Hive configuration
 * (spark.*, yarn.* for YARN masters, HBase/ZooKeeper keys, oozie.* keys and
 * remote-driver RPC keys). Also resolves and writes back spark.master and the
 * deploy mode into {@code hiveConf} and the session state.
 *
 * @param hiveConf Hive configuration to read from; mutated with the resolved
 *                 spark.master and deploy mode
 * @return map of Spark property name to value, suitable for building a SparkConf
 */
public static Map<String, String> initiateSparkConf(HiveConf hiveConf) {
    Map<String, String> sparkConf = new HashMap<String, String>();
    HBaseConfiguration.addHbaseResources(hiveConf);

    // set default spark configurations.
    sparkConf.put("spark.master", SPARK_DEFAULT_MASTER);
    final String appNameKey = "spark.app.name";
    String appName = hiveConf.get(appNameKey);
    if (appName == null) {
        appName = SPARK_DEFAULT_APP_NAME;
    }
    sparkConf.put(appNameKey, appName);
    sparkConf.put("spark.serializer", SPARK_DEFAULT_SERIALIZER);
    sparkConf.put("spark.kryo.referenceTracking", SPARK_DEFAULT_REFERENCE_TRACKING);

    // load properties from spark-defaults.conf, if present on the classpath.
    // try-with-resources replaces the manual finally/close boilerplate; a null
    // stream (resource absent) is handled by the null check and never closed.
    try (InputStream inputStream = HiveSparkClientFactory.class.getClassLoader()
            .getResourceAsStream(SPARK_DEFAULT_CONF_FILE)) {
        if (inputStream != null) {
            LOG.info("loading spark properties from:" + SPARK_DEFAULT_CONF_FILE);
            Properties properties = new Properties();
            properties.load(new InputStreamReader(inputStream, CharsetNames.UTF_8));
            for (String propertyName : properties.stringPropertyNames()) {
                if (propertyName.startsWith("spark")) {
                    String value = properties.getProperty(propertyName);
                    // reuse the already-fetched value instead of a second lookup
                    sparkConf.put(propertyName, value);
                    LOG.info(String.format("load spark property from %s (%s -> %s).", SPARK_DEFAULT_CONF_FILE,
                            propertyName, LogUtils.maskIfPassword(propertyName, value)));
                }
            }
        }
    } catch (IOException e) {
        // best-effort: a missing/unreadable defaults file is not fatal
        LOG.info("Failed to open spark configuration file:" + SPARK_DEFAULT_CONF_FILE, e);
    }

    // load properties from hive configurations, including both spark.* properties,
    // properties for remote driver RPC, and yarn properties for Spark on YARN mode.
    String sparkMaster = hiveConf.get("spark.master");
    if (sparkMaster == null) {
        sparkMaster = sparkConf.get("spark.master");
        hiveConf.set("spark.master", sparkMaster);
    }
    String deployMode = null;
    if (!SparkClientUtilities.isLocalMaster(sparkMaster)) {
        // resolve deploy mode: hiveConf -> sparkConf -> derived from master -> default
        deployMode = hiveConf.get(SPARK_DEPLOY_MODE);
        if (deployMode == null) {
            deployMode = sparkConf.get(SPARK_DEPLOY_MODE);
            if (deployMode == null) {
                deployMode = SparkClientUtilities.getDeployModeFromMaster(sparkMaster);
            }
            if (deployMode == null) {
                deployMode = SPARK_DEFAULT_DEPLOY_MODE;
            }
            hiveConf.set(SPARK_DEPLOY_MODE, deployMode);
        }
    }
    if (SessionState.get() != null && SessionState.get().getConf() != null) {
        SessionState.get().getConf().set("spark.master", sparkMaster);
        if (deployMode != null) {
            SessionState.get().getConf().set(SPARK_DEPLOY_MODE, deployMode);
        }
    }
    if (SparkClientUtilities.isYarnClusterMode(sparkMaster, deployMode)) {
        sparkConf.put("spark.yarn.maxAppAttempts", "1");
    }
    for (Map.Entry<String, String> entry : hiveConf) {
        String propertyName = entry.getKey();
        if (propertyName.startsWith("spark")) {
            String value = hiveConf.get(propertyName);
            sparkConf.put(propertyName, value);
            LOG.info(String.format("load spark property from hive configuration (%s -> %s).", propertyName,
                    LogUtils.maskIfPassword(propertyName, value)));
        } else if (propertyName.startsWith("yarn") && SparkClientUtilities.isYarnMaster(sparkMaster)) {
            String value = hiveConf.get(propertyName);
            // Add spark.hadoop prefix for yarn properties as SparkConf only accept properties
            // started with spark prefix, Spark would remove spark.hadoop prefix lately and add
            // it to its hadoop configuration.
            sparkConf.put("spark.hadoop." + propertyName, value);
            LOG.info(String.format("load yarn property from hive configuration in %s mode (%s -> %s).",
                    sparkMaster, propertyName, LogUtils.maskIfPassword(propertyName, value)));
        } else if (propertyName.equals(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY)) {
            String value = hiveConf.get(propertyName);
            if (value != null && !value.isEmpty()) {
                sparkConf.put("spark.hadoop." + propertyName, value);
            }
        } else if (propertyName.startsWith("hbase") || propertyName.startsWith("zookeeper.znode")) {
            // Add HBase related configuration to Spark because in security mode, Spark needs it
            // to generate hbase delegation token for Spark. This is a temp solution to deal with
            // Spark problem.
            String value = hiveConf.get(propertyName);
            sparkConf.put("spark.hadoop." + propertyName, value);
            LOG.info(String.format("load HBase configuration (%s -> %s).", propertyName,
                    LogUtils.maskIfPassword(propertyName, value)));
        } else if (propertyName.startsWith("oozie")) {
            String value = hiveConf.get(propertyName);
            sparkConf.put("spark." + propertyName, value);
            LOG.info(String.format("Pass Oozie configuration (%s -> %s).", propertyName,
                    LogUtils.maskIfPassword(propertyName, value)));
        }

        // RPC keys are checked independently of the prefix branches above
        if (RpcConfiguration.HIVE_SPARK_RSC_CONFIGS.contains(propertyName)) {
            String value = RpcConfiguration.getValue(hiveConf, propertyName);
            sparkConf.put(propertyName, value);
            LOG.info(String.format("load RPC property from hive configuration (%s -> %s).", propertyName,
                    LogUtils.maskIfPassword(propertyName, value)));
        }
    }

    // ensure Hive's key writable types are always Kryo-registered, merged with any
    // user-supplied registration list
    Set<String> classes = Sets.newHashSet(Splitter.on(",").trimResults().omitEmptyStrings()
            .split(Strings.nullToEmpty(sparkConf.get("spark.kryo.classesToRegister"))));
    classes.add(Writable.class.getName());
    classes.add(VectorizedRowBatch.class.getName());
    classes.add(BytesWritable.class.getName());
    classes.add(HiveKey.class.getName());
    sparkConf.put("spark.kryo.classesToRegister", Joiner.on(",").join(classes));

    // set yarn queue name, falling back to the MapReduce queue when unset
    final String sparkQueueNameKey = "spark.yarn.queue";
    if (SparkClientUtilities.isYarnMaster(sparkMaster) && hiveConf.get(sparkQueueNameKey) == null) {
        String queueName = hiveConf.get("mapreduce.job.queuename");
        if (queueName != null) {
            sparkConf.put(sparkQueueNameKey, queueName);
        }
    }

    // Disable it to avoid verbose app state report in yarn-cluster mode
    if (SparkClientUtilities.isYarnClusterMode(sparkMaster, deployMode)
            && sparkConf.get(SPARK_WAIT_APP_COMPLETE) == null) {
        sparkConf.put(SPARK_WAIT_APP_COMPLETE, "false");
    }

    // Set the credential provider passwords if found, if there is job specific password
    // the credential provider location is set directly in the execute method of LocalSparkClient
    // and submit method of RemoteHiveSparkClient when the job config is created
    String password = HiveConfUtil.getJobCredentialProviderPassword(hiveConf);
    if (password != null) {
        addCredentialProviderPassword(sparkConf, password);
    }
    return sparkConf;
}

From source file: org.apache.sqoop.submission.spark.SqoopSparkClientFactory.java

/**
 * Builds the Spark configuration map for a Sqoop-on-Spark job.
 * <p>
 * Starts from built-in defaults (master, app name, serializer) and then overlays
 * any spark.* properties found in spark-defaults.conf on the classpath.
 *
 * @param sqoopConf Sqoop configuration whose properties are logged for debugging
 * @return map of Spark property name to value
 */
public static Map<String, String> prepareSparkConfMapFromSqoopConfig(SqoopConf sqoopConf) {
    Map<String, String> sparkConf = new HashMap<String, String>();
    // set default spark configurations.
    sparkConf.put(Constants.SPARK_MASTER, SPARK_DEFAULT_MASTER);
    sparkConf.put(Constants.SPARK_APP_NAME, SPARK_DEFAULT_APP_NAME);
    sparkConf.put(Constants.SPARK_SERIALIZER, SPARK_DEFAULT_SERIALIZER);

    for (Map.Entry<String, String> p : sqoopConf.getProps().entrySet()) {
        LOG.info("sqoop spark properties from: " + p.getKey() + ": " + p.getValue());
    }
    // load properties from spark-defaults.conf, if present on the classpath.
    // try-with-resources replaces the manual finally/close boilerplate; a null
    // stream (resource absent) is handled by the null check and never closed.
    try (InputStream inputStream = SqoopSparkClientFactory.class.getClassLoader()
            .getResourceAsStream(SPARK_DEFAULT_CONF_FILE)) {
        if (inputStream != null) {
            LOG.info("Loading spark properties from:" + SPARK_DEFAULT_CONF_FILE);
            Properties properties = new Properties();
            properties.load(new InputStreamReader(inputStream, CharsetNames.UTF_8));
            for (String propertyName : properties.stringPropertyNames()) {
                if (propertyName.startsWith("spark")) {
                    String value = properties.getProperty(propertyName);
                    // reuse the already-fetched value instead of a second lookup
                    sparkConf.put(propertyName, value);
                    LOG.info(String.format("Load spark property from %s (%s -> %s).", SPARK_DEFAULT_CONF_FILE,
                            propertyName, value));
                }
            }
        }
    } catch (IOException e) {
        // best-effort: a missing/unreadable defaults file is not fatal
        LOG.info("Failed to open spark configuration file:" + SPARK_DEFAULT_CONF_FILE, e);
    }
    return sparkConf;
}

From source file: org.artifactory.common.wicket.util.WicketUtils.java

/**
 * Returns a syntax highlighter. If the size of the string exceeds the size limit defined in the system properties,
 * than a simple text content panel will be returned
 *
 * @param componentId ID to assign to the returned component
 * @param toDisplay   String to display/*  ww w .ja v a 2  s . co m*/
 * @param syntaxType  Type of syntax to use
 * @return Text displaying component
 */
public static Component getSyntaxHighlighter(String componentId, String toDisplay, Syntax syntaxType) {
    try {
        if (toDisplay != null && ConstantValues.uiSyntaxColoringMaxTextSizeBytes
                .getLong() >= toDisplay.getBytes(CharsetNames.UTF_8).length) {
            return new SyntaxHighlighter(componentId, toDisplay, syntaxType);
        } else {
            TextContentPanel contentPanel = new TextContentPanel(componentId);
            contentPanel.add(new CssClass("lines"));
            return contentPanel.setContent(toDisplay);
        }
    } catch (UnsupportedEncodingException e) {
        throw new RuntimeException(e);
    }
}

From source file: org.artifactory.repo.remote.browse.S3RepositorySecuredHelper.java

/**
 * Computes the HMAC-SHA1 signature of the canonical string with the given AWS
 * secret key and returns it Base64-encoded as a UTF-8 string.
 *
 * @param awsSecretKey    secret key used as the HMAC key
 * @param canonicalString request string to sign
 * @return Base64-encoded HMAC-SHA1 signature
 * @throws RuntimeException wrapping any failure during signing
 */
private static String signWithHmacSha1(String awsSecretKey, String canonicalString) throws Exception {
    try {
        Mac hmac = Mac.getInstance(HMAC_SHA1);
        hmac.init(new SecretKeySpec(awsSecretKey.getBytes(CharsetNames.UTF_8), HMAC_SHA1));
        byte[] signature = hmac.doFinal(canonicalString.getBytes(CharsetNames.UTF_8));
        return new String(Base64.encodeBase64(signature), CharsetNames.UTF_8);
    } catch (Exception e) {
        throw new RuntimeException("Could not sign with " + HMAC_SHA1, e);
    }
}

From source file: org.artifactory.repo.remote.browse.S3RepositorySecuredHelper.java

/**
 * URL-encodes the given string for use in an S3 URL: spaces are encoded as
 * {@code %20} rather than {@code +}, and {@code @} is left unescaped.
 *
 * @param string raw string to encode
 * @return the URL-encoded string
 * @throws RuntimeException wrapping the (impossible for UTF-8) encoding failure
 */
private static String encodeUrl(String string) {
    try {
        String encoded = URLEncoder.encode(string, CharsetNames.UTF_8);
        encoded = encoded.replace("+", "%20");
        return encoded.replace("%40", "@");
    } catch (UnsupportedEncodingException e) {
        throw new RuntimeException("Could not encode string " + string, e);
    }
}