Example usage for java.io DataOutput writeUTF

List of usage examples for java.io DataOutput writeUTF

Introduction

On this page you can find example usage for java.io DataOutput writeUTF.

Prototype

void writeUTF(String s) throws IOException;

Source Link

Document

Writes two bytes of length information to the output stream, followed by the modified UTF-8 representation of every character in the string s.

Usage

From source file:org.jactr.io.antlr3.serialization.Serializer.java

/**
 * Recursively serializes an ANTLR {@code CommonTree} node — its type, text,
 * token details, token-index range, optional source-position details, and
 * finally its children (preceded by a child count) — to {@code output}.
 * A matching reader elsewhere must consume fields in exactly this order.
 *
 * NOTE(review): writeUTF throws NullPointerException if getText() returns
 * null (for the node or its token) — confirm callers guarantee non-null text.
 *
 * @param tree   node to serialize; its token is cast to CommonToken and
 *               assumed non-null — TODO confirm
 * @param output destination for the binary form
 * @throws IOException if writing to the output fails
 */
static public void write(CommonTree tree, DataOutput output) throws IOException {
    output.writeInt(tree.getType());
    output.writeUTF(tree.getText());

    // Serialize the node's token: type, text, and position information.

    CommonToken token = (CommonToken) tree.getToken();
    output.writeInt(token.getType());
    output.writeUTF(token.getText());
    output.writeInt(token.getStartIndex());
    output.writeInt(token.getStopIndex());
    output.writeInt(token.getLine());
    output.writeInt(token.getCharPositionInLine());

    output.writeInt(tree.getTokenStartIndex());
    output.writeInt(tree.getTokenStopIndex());

    // Character offsets and source URL are only available on DetailedCommonTree;
    // sentinel values (-1, -1, "") are written otherwise so the format is fixed.
    int start = -1;
    int end = -1;
    String url = "";
    if (tree instanceof DetailedCommonTree) {
        start = ((DetailedCommonTree) tree).getStartOffset();
        end = ((DetailedCommonTree) tree).getStopOffset();
        if (((DetailedCommonTree) tree).getSource() != null)
            url = ((DetailedCommonTree) tree).getSource().toString();
    }

    output.writeInt(start);
    output.writeInt(end);
    output.writeUTF(url);

    // children: count first, then each child depth-first
    output.writeInt(tree.getChildCount());
    for (int i = 0; i < tree.getChildCount(); i++)
        write((CommonTree) tree.getChild(i), output);
}

From source file:mobi.hsz.idea.gitignore.indexing.IgnoreEntryOccurrence.java

/**
 * Static helper to write given {@link IgnoreEntryOccurrence} to the output stream.
 *
 * @param out   output stream//from   www  . j  a  v a  2s  .  c  o  m
 * @param entry entry to write
 * @throws IOException I/O exception
 */
/**
 * Writes an {@link IgnoreEntryOccurrence} to the output stream: the file
 * path, then the item count, then each (pattern, negation-flag) pair.
 *
 * @param out   destination stream
 * @param entry occurrence to serialize
 * @throws IOException on any write failure
 */
public static synchronized void serialize(@NotNull DataOutput out, @NotNull IgnoreEntryOccurrence entry)
        throws IOException {
    out.writeUTF(entry.getFile().getPath());
    // Count precedes the pairs so the reader knows how many to expect.
    final int count = entry.items.size();
    out.writeInt(count);
    for (Pair<Pattern, Boolean> pair : entry.items) {
        Pattern pattern = pair.first;
        Boolean negated = pair.second;
        out.writeUTF(pattern.pattern());
        out.writeBoolean(negated);
    }
}

From source file:uk.ac.gla.terrier.probos.Utils.java

/**
 * Writes an optional String as a presence flag followed, when present,
 * by its modified-UTF-8 encoding. A matching reader checks the boolean
 * first and only reads the UTF value when it is {@code true}.
 *
 * @param out destination stream
 * @param s   value to write; may be null
 * @throws IOException on any write failure
 */
public static void writeStringOrNull(DataOutput out, String s) throws IOException {
    final boolean present = (s != null);
    out.writeBoolean(present);
    if (present) {
        out.writeUTF(s);
    }
}

From source file:com.bigdata.dastor.utils.FBUtilities.java

/**
 * Writes a nullable String: an is-null flag followed, for non-null values,
 * by the modified-UTF-8 encoding. Note the flag is {@code true} when the
 * value is absent (the inverse of some other presence-flag conventions).
 *
 * @param key value to write; may be null
 * @param dos destination stream
 * @throws IOException on any write failure
 */
public static void writeNullableString(String key, DataOutput dos) throws IOException {
    final boolean absent = (key == null);
    dos.writeBoolean(absent);
    if (!absent) {
        dos.writeUTF(key);
    }
}

From source file:com.linkedin.cubert.io.rubix.RubixFile.java

/**
 * Extracts {@code numBlocks} consecutive blocks (ids {@code blockId} ..
 * {@code blockId + numBlocks - 1}) from the given rubix files and writes
 * them to a single output file: raw block data first, then a UTF metadata
 * JSON string, the rebuilt key-section trailer, and the trailer's start
 * offset as the final long.
 *
 * Fixes over the original: the output stream and each opened
 * FSDataInputStream are now closed via try-with-resources (the input
 * streams previously leaked), and a missing-block situation now raises a
 * descriptive IOException instead of a bare NullPointerException on
 * {@code lastrFile.metadataJson}.
 *
 * @param rfiles    candidate rubix files to search for the blocks
 * @param blockId   id of the first block to extract
 * @param numBlocks number of consecutive block ids to extract
 * @param output    path of the file to write (replaced if it exists)
 * @throws IOException if no block is found or any I/O operation fails
 */
private static void extract(List<RubixFile<Tuple, Object>> rfiles, long blockId, int numBlocks, String output)
        throws IOException, ClassNotFoundException, InstantiationException, IllegalAccessException {
    Configuration conf = new JobConf();
    File outFile = new File(output);
    if (outFile.exists()) {
        outFile.delete();
    }
    outFile.createNewFile();

    ByteArrayOutputStream keySectionStream = new ByteArrayOutputStream();
    DataOutput keySectionOut = new DataOutputStream(keySectionStream);
    SerializationFactory serializationFactory = new SerializationFactory(conf);
    RubixFile<Tuple, Object> lastrFile = null;
    long totalLength = 0;

    final int BUF_SIZE = 32 * 1024;
    long[] blockIds = new long[numBlocks];
    int foundBlocks = 0;

    for (int i = 0; i < numBlocks; i++)
        blockIds[i] = blockId + i;

    // try-with-resources: the output file is closed even if extraction fails.
    try (BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(outFile))) {
        for (int i = 0; i < numBlocks; i++) {
            boolean found = false;
            for (RubixFile<Tuple, Object> rfile : rfiles) {
                print.f("Checking %s", rfile.path.toString());
                List<KeyData<Tuple>> keyDataList = rfile.getKeyData();
                for (KeyData<Tuple> keyData : keyDataList) {
                    if (keyData.getBlockId() == blockIds[i]) {
                        long offset = keyData.getOffset();
                        long length = keyData.getLength();
                        Tuple key = keyData.getKey();
                        print.f("Extracting block %d (off=%d len=%d) from %s", keyData.getBlockId(), offset,
                                length, rfile.path.toString());

                        // Copy the raw block data; close the input stream
                        // (the original never closed it).
                        if (length > 0) {
                            FileSystem fs = FileSystem.get(conf);
                            try (FSDataInputStream in = fs.open(rfile.path)) {
                                in.seek(offset);
                                byte[] data = new byte[BUF_SIZE];
                                long toRead = length;
                                while (toRead > 0) {
                                    int thisRead = toRead > BUF_SIZE ? BUF_SIZE : (int) toRead;
                                    in.readFully(data, 0, thisRead);
                                    bos.write(data, 0, thisRead);
                                    toRead -= thisRead;
                                    System.out.print(".");
                                }
                                System.out.println();
                            }
                        }
                        // Rebuild the key-section entry for this block:
                        // serialized key, then position, block id, record count.
                        Serializer<Tuple> keySerializer =
                                serializationFactory.getSerializer(rfile.getKeyClass());
                        keySerializer.open(keySectionStream);

                        keySerializer.serialize(key);
                        keySectionOut.writeLong(totalLength); // position
                        keySectionOut.writeLong(keyData.getBlockId());
                        keySectionOut.writeLong(keyData.getNumRecords());
                        foundBlocks++;
                        totalLength += length;
                        lastrFile = rfile;

                        found = true;
                        break;
                    }
                }
                if (found) {
                    break;
                }
            }
            if (!found)
                System.err.println("Cannot locate block with id " + blockIds[i]);
        }

        // Fail clearly instead of NPE-ing on lastrFile.metadataJson below.
        if (lastrFile == null)
            throw new IOException("None of the requested blocks (starting at id " + blockId + ") were found");

        byte[] trailerBytes = keySectionStream.toByteArray();

        JsonNode json = JsonUtils.cloneNode(lastrFile.metadataJson);
        ((ObjectNode) json).put("numberOfBlocks", foundBlocks);

        DataOutput out = new DataOutputStream(bos);
        out.writeUTF(json.toString());
        out.writeInt(trailerBytes.length);
        out.write(trailerBytes);
        out.writeLong(totalLength); // trailer start offset
    }
}

From source file:com.chinamobile.bcbsp.ml.VectorWritable.java

/**
 * Serializes a {@code DoubleVector}: an element count, the element values,
 * then an optional-name flag followed by the name when present.
 *
 * Fix over the original: the null check only logged and then fell through
 * to an unadorned NullPointerException at {@code vector.getLength()};
 * it now throws immediately with a clear message (same exception type,
 * so callers' behavior is preserved).
 *
 * NOTE(review): the count written is getLength() but the value loop runs to
 * getDimension(); if those can differ the reader will desync — confirm they
 * are always equal for vectors passed here before changing either.
 *
 * @param vector vector to serialize; must not be null
 * @param out    destination stream
 * @throws IOException          on any write failure
 * @throws NullPointerException if vector is null
 */
public static void writeVector(DoubleVector vector, DataOutput out) throws IOException {
    if (vector == null) {
        LOG.info("lin test : VectorWritable write Vector is null");
        throw new NullPointerException("vector must not be null");
    }
    LOG.info("lin test : VectorWritable write Vector is not null");

    out.writeInt(vector.getLength());
    for (int i = 0; i < vector.getDimension(); i++) {
        out.writeDouble(vector.get(i));
    }

    // Name is optional: flag first, UTF value only when named and non-null.
    if (vector.isNamed() && vector.getName() != null) {
        out.writeBoolean(true);
        out.writeUTF(vector.getName());
    } else {
        out.writeBoolean(false);
    }
}

From source file:com.splicemachine.derby.impl.sql.execute.operations.SpliceBaseOperation.java

/**
 * Writes a nullable String as a presence flag followed, when present, by
 * its modified-UTF-8 encoding ({@code true} means a value follows).
 *
 * @param value value to write; may be null
 * @param out   destination stream
 * @throws IOException on any write failure
 */
public static void writeNullableString(String value, DataOutput out) throws IOException {
    if (value == null) {
        // Absent: flag only, nothing follows.
        out.writeBoolean(false);
        return;
    }
    out.writeBoolean(true);
    out.writeUTF(value);
}

From source file:com.sirius.hadoop.job.onlinetime.StatusKey.java

@Override
public void write(DataOutput out) throws IOException {
    out.writeUTF(userId != null ? userId : StringUtils.EMPTY);
    out.writeLong(time);/*from  w  w  w .j  a  va2s  . com*/
}

From source file:ph.fingra.hadoop.mapred.parts.performance.domain.AppNewuserHourKey.java

@Override
public void write(DataOutput out) throws IOException {

    out.writeUTF(this.appkey);
    out.writeUTF(this.token);
}

From source file:ph.fingra.hadoop.mapred.parts.performance.domain.HourSessionKey.java

@Override
public void write(DataOutput out) throws IOException {

    out.writeUTF(this.appkey);
    out.writeUTF(this.session);
    out.writeUTF(this.localtime);
}