Example usage for java.io DataOutput writeBytes

Introduction

This page collects example usages of java.io.DataOutput.writeBytes from open-source projects.

Prototype

void writeBytes(String s) throws IOException;

Document

Writes a string to the output stream, one byte per character: only the low-order eight bits of each char are written and the high-order eight bits are discarded, so the call is lossless only for ASCII/Latin-1 text.
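
For clarity, a minimal self-contained sketch of that truncation behavior (using DataOutputStream, the most common DataOutput implementation):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class WriteBytesDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buf)) {
            out.writeBytes("ASCII");  // 5 bytes, one per char
            out.writeBytes("\u00e9"); // 1 byte: 0xE9, Latin-1 survives
            out.writeBytes("\u20ac"); // 1 byte: 0xAC, the euro sign's high byte is dropped
        }
        System.out.println(buf.size()); // 7 -- writeBytes never encodes, it truncates
    }
}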

Usage

From source file:cn.iie.haiep.hbase.value.Bytes.java

/**
 * Writes a string as a fixed-size field, padded with zeros.
 */
public static void writeStringFixedSize(final DataOutput out, String s, int size) throws IOException {
    byte[] b = toBytes(s);
    if (b.length > size) {
        throw new IOException("Trying to write " + b.length + " bytes (" + toStringBinary(b)
                + ") into a field of length " + size);
    }

    out.writeBytes(s);
    for (int i = 0; i < size - s.length(); ++i)
        out.writeByte(0);
}
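
A matching read-side sketch, assuming the zero-padded layout written above (readStringFixedSize here is an illustrative reconstruction, not code from this file):

public static String readStringFixedSize(final DataInput in, int size) throws IOException {
    byte[] b = new byte[size];
    in.readFully(b);
    // the field is zero-padded, so the string ends at the first 0 byte
    int end = 0;
    while (end < size && b[end] != 0) {
        end++;
    }
    // ISO-8859-1 inverts writeBytes' one-byte-per-char truncation
    return new String(b, 0, end, java.nio.charset.StandardCharsets.ISO_8859_1);
}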

From source file:dk.statsbiblioteket.util.LineReaderTest.java

public void writeSample(DataOutput out) throws Exception {
    out.writeInt(12345);
    out.writeInt(-87);
    out.writeLong(123456789L);
    out.write("Hello World!\n".getBytes("utf-8"));
    out.write("Another world\n".getBytes("utf-8"));
    out.writeFloat(0.5f);
    out.writeBoolean(true);
    out.writeBoolean(false);
    out.writeByte(12);
    out.writeByte(-12);
    out.write(129);
    out.writeShort(-4567);
    out.writeBytes("ASCII");
}
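
A read sequence that would mirror writeSample, assuming a DataInput positioned at the start of the written bytes (the method name is illustrative):

public void readSample(DataInput in) throws IOException {
    int i1 = in.readInt();          // 12345
    int i2 = in.readInt();          // -87
    long l = in.readLong();         // 123456789L
    byte[] hello = new byte[13];    // "Hello World!\n" is 13 bytes in UTF-8
    in.readFully(hello);
    byte[] another = new byte[14];  // "Another world\n" is 14 bytes
    in.readFully(another);
    float f = in.readFloat();       // 0.5f
    boolean b1 = in.readBoolean();  // true
    boolean b2 = in.readBoolean();  // false
    byte by1 = in.readByte();       // 12
    byte by2 = in.readByte();       // -12
    int ub = in.readUnsignedByte(); // 129 -- write(int) stored only the low byte
    short s = in.readShort();       // -4567
    byte[] ascii = new byte[5];     // "ASCII" -- writeBytes wrote one byte per char
    in.readFully(ascii);
}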

From source file:org.apache.hadoop.hive.ql.exec.ACLTask.java

private int showGroup(Hive db, showGroupsDesc sGD) throws HiveException {

    List<String> groups = null;
    if (sGD.getPattern() != null) {
        LOG.info("pattern: " + sGD.getPattern());
        groups = db.getGroups(sGD.getPattern());
        LOG.info("results : " + groups.size());
    } else
        groups = db.getGroups(".*");

    try {
        FileSystem fs = sGD.getResFile().getFileSystem(conf);
        DataOutput outStream = (DataOutput) fs.create(sGD.getResFile());
        SortedSet<String> sortedTbls = new TreeSet<String>(groups);
        Iterator<String> iterTbls = sortedTbls.iterator();

        while (iterTbls.hasNext()) {
            outStream.writeBytes(iterTbls.next());
            outStream.write(terminator);
        }
        ((FSDataOutputStream) outStream).close();
    } catch (FileNotFoundException e) {
        LOG.warn("show groups: " + StringUtils.stringifyException(e));
        if (SessionState.get() != null)
            SessionState.get().ssLog("show groups: " + StringUtils.stringifyException(e));
        return 1;
    } catch (IOException e) {
        LOG.warn("show groups: " + StringUtils.stringifyException(e));
        if (SessionState.get() != null)
            SessionState.get().ssLog("show groups: " + StringUtils.stringifyException(e));
        return 1;
    } catch (Exception e) {
        throw new HiveException(e.toString());
    }
    return 0;
}
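
fs.create already returns an FSDataOutputStream, which implements DataOutput, so the cast above and the cast back for close() can be avoided entirely; a sketch of the same loop against the same Hadoop API (terminator is the separator byte from the surrounding class):

try (FSDataOutputStream out = fs.create(sGD.getResFile())) {
    for (String group : new TreeSet<String>(groups)) {
        out.writeBytes(group); // safe only because group names are expected to be ASCII
        out.write(terminator);
    }
}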

From source file:org.apache.hadoop.hive.ql.exec.ACLTask.java

private int showRoles(Hive db, showRolesDesc showRolesD) throws HiveException {

    List<String> roles;
    if (showRolesD.getUser() == null)
        roles = db.showRoles(showRolesD.getWho());
    else
        return 0;

    try {
        FileSystem fs = showRolesD.getTmpFile().getFileSystem(conf);
        DataOutput outStream = (DataOutput) fs.create(showRolesD.getTmpFile());
        LOG.info("show roles tmp file:" + showRolesD.getTmpFile().toString());
        SortedSet<String> sortedRoles = new TreeSet<String>(roles);
        Iterator<String> iterRoles = sortedRoles.iterator();

        outStream.writeBytes("ALL roles in TDW:");
        outStream.write(terminator);

        while (iterRoles.hasNext()) {
            outStream.writeBytes(iterRoles.next());
            outStream.write(terminator);
        }
        ((FSDataOutputStream) outStream).close();
    } catch (FileNotFoundException e) {
        LOG.warn("show roles: " + StringUtils.stringifyException(e));
        return 1;
    } catch (IOException e) {
        LOG.warn("show roles: " + StringUtils.stringifyException(e));
        return 1;
    } catch (Exception e) {
        throw new HiveException(e.toString());
    }
    LOG.info("show roles OK");
    return 0;
}

From source file:org.apache.hadoop.hive.ql.exec.ACLTask.java

private int showUsers(Hive db, showUsersDesc showUsersD) throws HiveException {
    List<String> users = db.showUsers(showUsersD.getWho());

    try {
        FileSystem fs = showUsersD.getTmpFile().getFileSystem(conf);
        DataOutput outStream = (DataOutput) fs.create(showUsersD.getTmpFile());
        SortedSet<String> sortedUsers = new TreeSet<String>(users);
        Iterator<String> iterUsers = sortedUsers.iterator();

        outStream.writeBytes("All users in TDW:");
        outStream.write(terminator);

        while (iterUsers.hasNext()) {
            outStream.writeBytes(iterUsers.next());
            outStream.write(terminator);
        }
        ((FSDataOutputStream) outStream).close();
    } catch (FileNotFoundException e) {
        LOG.warn("show users: " + StringUtils.stringifyException(e));
        return 1;
    } catch (IOException e) {
        LOG.warn("show users: " + StringUtils.stringifyException(e));
        return 1;
    } catch (Exception e) {
        throw new HiveException(e.toString());
    }
    LOG.info("show users OK");
    return 0;
}

From source file:org.apache.hadoop.hive.ql.exec.ACLTask.java

private int showGrants(Hive db, showGrantsDesc showGrantsD) throws HiveException {
    List<String> grants = db.showGrants(showGrantsD.getWho(), showGrantsD.getUser());

    try {
        FileSystem fs = showGrantsD.getTmpFile().getFileSystem(conf);
        DataOutput outStream = (DataOutput) fs.create(showGrantsD.getTmpFile());
        Iterator<String> iterGrants = grants.iterator();

        while (iterGrants.hasNext()) {
            outStream.writeBytes(iterGrants.next());
            outStream.write(terminator);
        }
        ((FSDataOutputStream) outStream).close();
    } catch (FileNotFoundException e) {
        LOG.warn("show grants: " + StringUtils.stringifyException(e));
        return 1;
    } catch (IOException e) {
        LOG.warn("show grants: " + StringUtils.stringifyException(e));
        return 1;
    } catch (Exception e) {
        throw new HiveException(e.toString());
    }
    return 0;
}

From source file:org.apache.hadoop.hive.ql.exec.DCLTask.java

private int showGrants(ShowGrantsDesc showGntsDesc) throws HiveException, AuthorizeException {
    String userName = showGntsDesc.getUser();
    if (userName == null) {
        userName = SessionState.get().getUserName();
    }
    User user = db.getUser(userName);
    try {
        if (user == null) {
            FileSystem fs = showGntsDesc.getResFile().getFileSystem(conf);
            DataOutput outStream = (DataOutput) fs.create(showGntsDesc.getResFile());
            String errMsg = "User " + userName + " does not exist";
            outStream.write(errMsg.getBytes("UTF-8"));
            ((FSDataOutputStream) outStream).close();
            return 0;
        }
    } catch (FileNotFoundException e) {
        LOG.info("show grants: " + StringUtils.stringifyException(e));
        return 1;
    } catch (IOException e) {
        LOG.info("show grants: " + StringUtils.stringifyException(e));
        return 1;
    }

    try {

        LOG.info("DCLTask: got grant privilege for " + user.getName());

        FileSystem fs = showGntsDesc.getResFile().getFileSystem(conf);
        DataOutput outStream = (DataOutput) fs.create(showGntsDesc.getResFile());

        List<AuthorizeEntry> entries = SessionState.get().getAuthorizer().getAllPrivileges(userName);
        if (entries == null || entries.isEmpty()) {
            return 0;
        }
        for (AuthorizeEntry e : entries) {
            switch (e.getPrivLevel()) {
            case GLOBAL_LEVEL:
                outStream.writeBytes("Global grants: ");
                break;
            case DATABASE_LEVEL:
                outStream.writeBytes(String.format("Grants on database %s:", e.getDb().getName()));
                break;
            case TABLE_LEVEL:
                outStream.writeBytes(String.format("Grants on table %s.%s:", e.getTable().getDbName(),
                        e.getTable().getTableName()));
                break;
            case COLUMN_LEVEL:
                String fields = "";
                if (e.getFields() != null && !e.getFields().isEmpty()) {
                    for (FieldSchema f : e.getFields()) {
                        fields += f.getName() + ",";
                    }
                } else {
                    fields = "<null>";
                }
                outStream.writeBytes(String.format("Grants on column %s.%s.[%s]:", e.getTable().getDbName(),
                        e.getTable().getTableName(), fields));
                break;
            default:
            }
            for (Privilege p : e.getRequiredPrivs()) {
                outStream.writeBytes(p.toString() + " ");
            }
            outStream.write(terminator);
        }
        LOG.info("DCLTask: written data for " + user.getName());
        ((FSDataOutputStream) outStream).close();
    } catch (FileNotFoundException e) {
        LOG.info("show grants: " + StringUtils.stringifyException(e));
        return 1;
    } catch (IOException e) {
        LOG.info("show grants: " + StringUtils.stringifyException(e));
        return 1;
    } catch (Exception e) {
        throw new HiveException(e.toString());
    }
    return 0;
}

From source file:org.pepstock.jem.util.filters.predicates.JemFilterPredicate.java

@Override
public void writeData(DataOutput dataOutput) throws IOException {
    // replace \n because newlines are not supported by Hazelcast's
    // serialization engine
    String ee = stream.toXML(filter).replace('\n', ' ');
    dataOutput.writeBytes(ee);
}
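
Because writeBytes keeps only the low-order byte of each char, this round-trips correctly only while the filter XML stays within Latin-1. A hedged alternative sketch using writeUTF (a different wire format, so the reading side would have to switch to readUTF as well):

@Override
public void writeData(DataOutput dataOutput) throws IOException {
    // writeUTF length-prefixes the string and encodes it as modified UTF-8,
    // so newlines and non-Latin-1 characters survive (limit: 65535 encoded bytes)
    dataOutput.writeUTF(stream.toXML(filter));
}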