Example usage for org.apache.commons.io Charsets UTF_8

Introduction

On this page you can find example usage of the org.apache.commons.io Charsets UTF_8 constant.

Prototype

Charset UTF_8

Document

Eight-bit Unicode Transformation Format.
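
Charsets.UTF_8 is a java.nio.charset.Charset constant, so it can be passed directly to any API that accepts a Charset. A minimal round-trip sketch (the class name here is illustrative, not from the examples below):

import java.nio.charset.Charset;
import org.apache.commons.io.Charsets;

public class Utf8RoundTrip {
    public static void main(String[] args) {
        Charset utf8 = Charsets.UTF_8;
        // Passing a Charset (rather than the name "UTF-8") means no
        // UnsupportedEncodingException can be thrown.
        byte[] bytes = "héllo".getBytes(utf8);
        String decoded = new String(bytes, utf8);
        System.out.println(decoded); // héllo
    }
}

Since Java 7, java.nio.charset.StandardCharsets.UTF_8 provides the same constant, and recent versions of Commons IO deprecate Charsets.UTF_8 in its favor.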

Usage

From source file:org.ambraproject.wombat.service.remote.ApiAddress.java

public String getAddress() {
    return path + (parameters.isEmpty() ? "" : "?" + URLEncodedUtils.format(parameters, Charsets.UTF_8));
}
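
URLEncodedUtils.format here is Apache HttpClient's helper for percent-encoding name/value pairs with the given charset. A self-contained sketch of the same call (the parameter values are made up):

import java.util.Arrays;
import java.util.List;
import org.apache.commons.io.Charsets;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.message.BasicNameValuePair;

public class QueryStringDemo {
    public static void main(String[] args) {
        List<NameValuePair> parameters = Arrays.asList(
                new BasicNameValuePair("title", "tardigrades in space"),
                new BasicNameValuePair("rows", "10"));
        // Each name and value is form-encoded as UTF-8; spaces become '+'.
        String query = URLEncodedUtils.format(parameters, Charsets.UTF_8);
        System.out.println(query); // title=tardigrades+in+space&rows=10
    }
}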

From source file:org.ambraproject.wombat.util.UrlParamBuilder.java

public String format() {
    return URLEncodedUtils.format(params, Charsets.UTF_8).replace("%2F", "/");
}

From source file:org.apache.drill.exec.store.solr.SolrRecordReader.java

private void processRecord(ValueVector vv, Object fieldValue, int recordCounter) {
    String fieldValueStr = null;
    byte[] record = null;
    try {
        fieldValueStr = fieldValue.toString();
        record = fieldValueStr.getBytes(Charsets.UTF_8);

        if (vv.getClass().equals(NullableVarCharVector.class)) {
            NullableVarCharVector v = (NullableVarCharVector) vv;
            v.getMutator().setSafe(recordCounter, record, 0, record.length);
            v.getMutator().setValueLengthSafe(recordCounter, record.length);
        } else if (vv.getClass().equals(NullableBigIntVector.class)) {
            NullableBigIntVector v = (NullableBigIntVector) vv;
            BigDecimal bd = new BigDecimal(fieldValueStr);
            v.getMutator().setSafe(recordCounter, bd.longValue());
        } else if (vv.getClass().equals(NullableIntVector.class)) {
            NullableIntVector v = (NullableIntVector) vv;
            v.getMutator().setSafe(recordCounter, Integer.parseInt(fieldValueStr));
        } else if (vv.getClass().equals(NullableFloat8Vector.class)) {
            NullableFloat8Vector v = (NullableFloat8Vector) vv;
            Double d = Double.parseDouble(fieldValueStr);
            v.getMutator().setSafe(recordCounter, d);
        } else if (vv.getClass().equals(DateVector.class)) {
            DateVector v = (DateVector) vv;
            long dtime = 0L;
            try {
                TemporalAccessor accessor = SolrRecordReader.timeFormatter.parse(fieldValueStr);
                Date date = Date.from(Instant.from(accessor));
                dtime = date.getTime();
            } catch (Exception e) {
                SimpleDateFormat dateParser = new SimpleDateFormat(SolrRecordReader.defaultDateFormat);
                dtime = dateParser.parse(fieldValueStr).getTime();
            }

            v.getMutator().setSafe(recordCounter, dtime);
        } else if (vv.getClass().equals(NullableTimeStampVector.class)) {
            NullableTimeStampVector v = (NullableTimeStampVector) vv;
            DateTimeFormatter timeFormatter = DateTimeFormatter.ISO_DATE_TIME;
            long dtime = 0L;

            try {
                TemporalAccessor accessor = timeFormatter.parse(fieldValueStr);
                Date date = Date.from(Instant.from(accessor));
                dtime = date.getTime();
            } catch (Exception e) {
                SimpleDateFormat dateParser = new SimpleDateFormat(SolrRecordReader.defaultDateFormat);
                dtime = dateParser.parse(fieldValueStr).getTime();
            }
            v.getMutator().setSafe(recordCounter, dtime);
        }
    } catch (Exception e) {
        SolrRecordReader.logger.error("Error processing record: " + e.getMessage() + " " + vv.getField().getPath()
                + " Field type " + vv.getField().getType() + " " + vv.getClass());
    }
}

From source file:org.apache.hadoop.fs.http.client.HttpFSUtils.java

/**
 * Convenience method that JSON-parses the <code>InputStream</code> of a
 * <code>HttpURLConnection</code>.
 *
 * @param conn the <code>HttpURLConnection</code>.
 *
 * @return the parsed JSON object.
 *
 * @throws IOException thrown if the <code>InputStream</code> could not be
 * parsed as JSON.
 */
static Object jsonParse(HttpURLConnection conn) throws IOException {
    try {
        JSONParser parser = new JSONParser();
        return parser.parse(new InputStreamReader(conn.getInputStream(), Charsets.UTF_8));
    } catch (ParseException ex) {
        throw new IOException("JSON parser error, " + ex.getMessage(), ex);
    }
}
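
The same pattern, wrapping the connection's InputStream in an InputStreamReader built with Charsets.UTF_8, works with any reader-based parser. A minimal JDK-only sketch that just prints the response body (the URL is made up):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import org.apache.commons.io.Charsets;

public class PrintResponseBody {
    public static void main(String[] args) throws IOException {
        URL url = new URL("http://localhost:14000/webhdfs/v1/?op=GETHOMEDIRECTORY");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        // Decode the body as UTF-8 regardless of the platform default charset.
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), Charsets.UTF_8))) {
            reader.lines().forEach(System.out::println);
        }
    }
}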

From source file:org.apache.hadoop.fs.http.server.HttpFSAuthenticationFilter.java

/**
 * Returns the hadoop-auth configuration from HttpFSServer's configuration.
 * <p>
 * It returns all HttpFSServer's configuration properties prefixed with
 * <code>httpfs.authentication</code>. The <code>httpfs.authentication</code>
 * prefix is removed from the returned property names.
 *
 * @param configPrefix parameter not used.
 * @param filterConfig parameter not used.
 *
 * @return hadoop-auth configuration read from HttpFSServer's configuration.
 */
@Override
protected Properties getConfiguration(String configPrefix, FilterConfig filterConfig) throws ServletException {
    Properties props = new Properties();
    Configuration conf = HttpFSServerWebApp.get().getConfig();

    props.setProperty(AuthenticationFilter.COOKIE_PATH, "/");
    for (Map.Entry<String, String> entry : conf) {
        String name = entry.getKey();
        if (name.startsWith(CONF_PREFIX)) {
            String value = conf.get(name);
            name = name.substring(CONF_PREFIX.length());
            props.setProperty(name, value);
        }
    }

    String signatureSecretFile = props.getProperty(SIGNATURE_SECRET_FILE, null);
    if (signatureSecretFile == null) {
        throw new RuntimeException("Undefined property: " + SIGNATURE_SECRET_FILE);
    }

    try {
        StringBuilder secret = new StringBuilder();
        Reader reader = new InputStreamReader(new FileInputStream(signatureSecretFile), Charsets.UTF_8);
        int c = reader.read();
        while (c > -1) {
            secret.append((char) c);
            c = reader.read();
        }
        reader.close();
        props.setProperty(AuthenticationFilter.SIGNATURE_SECRET, secret.toString());
    } catch (IOException ex) {
        throw new RuntimeException("Could not read HttpFS signature secret file: " + signatureSecretFile, ex);
    }
    return props;
}
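
Since this page is about Commons IO, note that the character-by-character loop above could also be written with Commons IO's IOUtils; a sketch of that alternative, not the actual HttpFS code:

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.commons.io.Charsets;
import org.apache.commons.io.IOUtils;

public class ReadSecretFile {
    // Reads the whole secret file as a single UTF-8 string.
    static String readSecret(String signatureSecretFile) throws IOException {
        try (InputStream in = new FileInputStream(signatureSecretFile)) {
            // IOUtils.toString(InputStream, Charset) drains the stream as UTF-8.
            return IOUtils.toString(in, Charsets.UTF_8);
        }
    }
}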

From source file:org.apache.hadoop.hdfs.DFSUtilClient.java

/**
 * Converts a string to a byte array using UTF-8 encoding.
 */
public static byte[] string2Bytes(String str) {
    return str.getBytes(Charsets.UTF_8);
}
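
The reason to pass Charsets.UTF_8 rather than the string name "UTF-8" is that String.getBytes(Charset) throws no checked exception, while String.getBytes(String) forces the caller to handle UnsupportedEncodingException. An illustrative comparison (not from the HDFS source):

import java.io.UnsupportedEncodingException;
import org.apache.commons.io.Charsets;

public class EncodingComparison {
    public static void main(String[] args) throws UnsupportedEncodingException {
        String s = "hello";
        // Charset overload: nothing to catch or declare.
        byte[] a = s.getBytes(Charsets.UTF_8);
        // String-name overload: must handle UnsupportedEncodingException.
        byte[] b = s.getBytes("UTF-8");
        System.out.println(a.length == b.length); // true
    }
}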

From source file:org.apache.hadoop.hdfs.server.datanode.web.webhdfs.ParameterParser.java

ParameterParser(QueryStringDecoder decoder, Configuration conf) {
    this.path = decodeComponent(decoder.path().substring(WEBHDFS_PREFIX_LENGTH), Charsets.UTF_8);
    this.params = decoder.parameters();
    this.conf = conf;
}

From source file:org.apache.hadoop.hdfs.server.datanode.web.webhdfs.WebHdfsHandler.java

private void onGetFileChecksum(ChannelHandlerContext ctx) throws IOException {
    MD5MD5CRC32FileChecksum checksum = null;
    final String nnId = params.namenodeId();
    DFSClient dfsclient = newDfsClient(nnId, conf);
    try {
        checksum = dfsclient.getFileChecksum(path, Long.MAX_VALUE);
        dfsclient.close();
        dfsclient = null;
    } finally {
        IOUtils.cleanup(LOG, dfsclient);
    }
    final byte[] js = JsonUtil.toJsonString(checksum).getBytes(Charsets.UTF_8);
    DefaultFullHttpResponse resp = new DefaultFullHttpResponse(HTTP_1_1, OK, Unpooled.wrappedBuffer(js));

    resp.headers().set(CONTENT_TYPE, APPLICATION_JSON_UTF8);
    resp.headers().set(CONTENT_LENGTH, js.length);
    resp.headers().set(CONNECTION, CLOSE);
    ctx.writeAndFlush(resp).addListener(ChannelFutureListener.CLOSE);
}
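
Note that CONTENT_LENGTH is set from js.length, the UTF-8 byte count rather than the character count of the JSON string; encoding with Charsets.UTF_8 before building the response is what keeps the header consistent with the bytes actually written to the channel.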

From source file:org.apache.hadoop.hdfs.server.namenode.FSDirectory.java

/**
 * Add the given filename to the fs.
 *
 * @throws FileAlreadyExistsException
 * @throws QuotaExceededException
 * @throws UnresolvedLinkException
 */
INodesInPath addFile(INodesInPath existing, String localName, PermissionStatus permissions, short replication,
        long preferredBlockSize, String clientName, String clientMachine) throws IOException {

    long modTime = now();

    INodeFile newNode = new INodeFile(IDsGeneratorFactory.getInstance().getUniqueINodeID(), permissions,
            BlockInfoContiguous.EMPTY_ARRAY, replication, modTime, modTime, preferredBlockSize, (byte) 0);
    newNode.setLocalNameNoPersistance(localName.getBytes(Charsets.UTF_8));
    newNode.toUnderConstruction(clientName, clientMachine);

    INodesInPath newiip;
    newiip = addINode(existing, newNode);

    if (newiip == null) {
        NameNode.stateChangeLog.info("DIR* addFile: failed to add " + existing.getPath() + "/" + localName);
        return null;
    }

    if (NameNode.stateChangeLog.isDebugEnabled()) {
        NameNode.stateChangeLog.debug("DIR* addFile: " + localName + " is added");
    }
    return newiip;
}

From source file:org.apache.hadoop.hdfs.server.namenode.FSDirMkdirOp.java

/**
* For a given absolute path, create all ancestors as directories along the
* path. All ancestors inherit their parent's permission plus an implicit
* u+wx permission. This is used by create() and addSymlink() for
* implicitly creating all directories along the path.
*
* For example, for path="/foo/bar/spam" where "/foo" is an existing directory
* and "/foo/bar" does not exist yet, the function will create the directory "bar".
*
* @return a tuple which contains both the new INodesInPath (with all the
* existing and newly created directories) and the last component in the
* relative path. Or return null if there are errors.
*/
static Map.Entry<INodesInPath, String> createAncestorDirectories(FSDirectory fsd, INodesInPath iip,
        PermissionStatus permission) throws IOException {
    final String last = new String(iip.getLastLocalName(), Charsets.UTF_8);
    INodesInPath existing = iip.getExistingINodes();
    List<String> children = iip.getPath(existing.length(), iip.length() - existing.length());
    int size = children.size();
    if (size > 1) { // otherwise all ancestors have been created
        List<String> directories = children.subList(0, size - 1);
        INode parentINode = existing.getLastINode();
        // Ensure that the user can traverse the path by adding implicit
        // u+wx permission to all ancestor directories
        existing = createChildrenDirectories(fsd, existing, directories,
                addImplicitUwx(parentINode.getPermissionStatus(), permission));
        if (existing == null) {
            return null;
        }
    }
    return new AbstractMap.SimpleImmutableEntry<>(existing, last);
}
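
Here new String(byte[], Charset) is the decoding counterpart of the getBytes(Charsets.UTF_8) call in addFile above: INode local names are stored as UTF-8 bytes, so the last path component must be decoded back with the same charset.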