Example usage for org.apache.hadoop.io SequenceFile createWriter

Introduction

On this page you can find example usages of org.apache.hadoop.io.SequenceFile#createWriter, drawn from the source files listed under Usage.

Prototype

@Deprecated
public static Writer createWriter(FileSystem fs, Configuration conf, Path name, Class keyClass, Class valClass,
        CompressionType compressionType, CompressionCodec codec, Progressable progress) throws IOException 

Document

Construct the preferred type of SequenceFile Writer.
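
Before the project examples below, here is a minimal, self-contained sketch of calling this overload directly. The output path and the Text/IntWritable key and value classes are placeholders chosen for illustration; null is passed for the trailing Progressable, as several of the examples below also do.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.DefaultCodec;

public class CreateWriterSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        Path file = new Path("/tmp/example.seq"); // placeholder path
        FileSystem fs = file.getFileSystem(conf);

        // Block-compress with the default codec; the final argument is a
        // Progressable, and null is acceptable when there is nothing to report to.
        SequenceFile.Writer out = SequenceFile.createWriter(fs, conf, file, Text.class,
                IntWritable.class, CompressionType.BLOCK, new DefaultCodec(), null);
        try {
            out.append(new Text("key"), new IntWritable(1));
        } finally {
            out.close();
        }
    }
}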

Usage

From source file: edu.arizona.cs.hadoop.fs.irods.output.HirodsSequenceFileAsBinaryOutputFormat.java

License: Apache License

protected SequenceFile.Writer getSequenceWriter(TaskAttemptContext context, Class<?> keyClass,
        Class<?> valueClass) throws IOException {
    Configuration conf = context.getConfiguration();

    CompressionCodec codec = null;
    CompressionType compressionType = CompressionType.NONE;
    if (getCompressOutput(context)) {
        // find the kind of compression to do
        compressionType = getOutputCompressionType(context);
        // find the right codec
        Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
        codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
    }
    // get the path of the temporary output file
    Path file = getDefaultWorkFile(context, "");
    FileSystem fs = file.getFileSystem(conf);
    return SequenceFile.createWriter(fs, conf, file, keyClass, valueClass, compressionType, codec, context);
}

From source file: edu.arizona.cs.hadoop.fs.irods.output.HirodsSequenceFileOutputFormat.java

License: Apache License

@Override
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
    Configuration conf = context.getConfiguration();

    CompressionCodec codec = null;
    CompressionType compressionType = CompressionType.NONE;
    if (getCompressOutput(context)) {
        // find the kind of compression to do
        compressionType = getOutputCompressionType(context);

        // find the right codec
        Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
        codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
    }
    // get the path of the temporary output file 
    Path file = getDefaultWorkFile(context, "");
    FileSystem fs = file.getFileSystem(conf);
    final SequenceFile.Writer out = SequenceFile.createWriter(fs, conf, file, context.getOutputKeyClass(),
            context.getOutputValueClass(), compressionType, codec, context);

    return new RecordWriter<K, V>() {

        @Override
        public void write(K key, V value) throws IOException {

            out.append(key, value);
        }

        @Override
        public void close(TaskAttemptContext context) throws IOException {
            out.close();
        }
    };
}
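
The compression branch in the two getRecordWriter implementations above is driven entirely by job configuration. Below is a hedged sketch of the driver-side calls they respond to; the job name and the GzipCodec choice are illustrative assumptions, not taken from the examples.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;

public class DriverConfigSketch {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "example"); // illustrative job name
        // Makes getCompressOutput(context) return true in the record writers above.
        FileOutputFormat.setCompressOutput(job, true);
        // Read back by getOutputCompressionType(context).
        SequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);
        // Read back by getOutputCompressorClass(context, DefaultCodec.class).
        FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);
    }
}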

From source file: org.archive.bacon.io.SequenceFileStorage.java

License: Apache License

/**
 * Most of this method is cut/pasted from the Hadoop
 * SequenceFileOutputFormat.  The big difference is that we use the
 * key and value types given to this Pig storage class rather than
 * using the ones set by the job configuration.
 */
public OutputFormat getOutputFormat() throws IOException {
    return new SequenceFileOutputFormat() {
        public RecordWriter getRecordWriter(TaskAttemptContext context)
                throws IOException, InterruptedException {
            Configuration conf = context.getConfiguration();

            Class keyClass, valueClass;
            try {
                keyClass = conf.getClassByName(keyType);
                valueClass = conf.getClassByName(valueType);
            } catch (ClassNotFoundException cnfe) {
                throw new IOException(cnfe);
            }

            // Instantiate null objects for the key and value types.
            // See getWritable() for their use.
            try {
                nullKey = (Writable) keyClass.newInstance();
                nullValue = (Writable) valueClass.newInstance();
            } catch (ReflectiveOperationException roe) {
                throw new IOException(roe);
            }

            CompressionCodec codec = null;
            CompressionType compressionType = CompressionType.NONE;
            if (getCompressOutput(context)) {
                // find the kind of compression to do
                compressionType = getOutputCompressionType(context);

                // find the right codec
                Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
                codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
            }
            // get the path of the temporary output file 
            Path file = getDefaultWorkFile(context, "");
            FileSystem fs = file.getFileSystem(conf);
            final SequenceFile.Writer out = SequenceFile.createWriter(fs, conf, file, keyClass, valueClass,
                    compressionType, codec, context);

            return new RecordWriter() {

                public void write(Object key, Object value) throws IOException {

                    out.append(key, value);
                }

                public void close(TaskAttemptContext context) throws IOException {
                    out.close();
                }
            };
        }
    };
}

From source file: org.archive.hadoop.pig.SequenceFileStorage.java

License: Apache License

/**
 * Most of this method is cut/pasted from the Hadoop
 * SequenceFileOutputFormat.  The big difference is that we use the
 * key and value types given to this Pig storage class rather than
 * using the ones set by the job configuration.
 */
public OutputFormat getOutputFormat() throws IOException {
    return new SequenceFileOutputFormat() {
        public RecordWriter getRecordWriter(TaskAttemptContext context)
                throws IOException, InterruptedException {
            Configuration conf = context.getConfiguration();

            Class keyClass, valueClass;
            try {
                keyClass = conf.getClassByName(keyType);
                valueClass = conf.getClassByName(valueType);
            } catch (ClassNotFoundException cnfe) {
                throw new IOException(cnfe);
            }

            // Instantiate null objects for the key and value types.
            // See getWritable() for their use.
            try {
                nullKey = (Writable) keyClass.newInstance();
                nullValue = (Writable) valueClass.newInstance();
            } catch (Exception roe) {
                throw new IOException(roe);
            }

            CompressionCodec codec = null;
            CompressionType compressionType = CompressionType.NONE;
            if (getCompressOutput(context)) {
                // find the kind of compression to do
                compressionType = getOutputCompressionType(context);

                // find the right codec
                Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
                codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
            }
            // get the path of the temporary output file 
            Path file = getDefaultWorkFile(context, "");
            FileSystem fs = file.getFileSystem(conf);
            final SequenceFile.Writer out = SequenceFile.createWriter(fs, conf, file, keyClass, valueClass,
                    compressionType, codec, context);

            return new RecordWriter() {

                public void write(Object key, Object value) throws IOException {

                    out.append(key, value);
                }

                public void close(TaskAttemptContext context) throws IOException {
                    out.close();
                }
            };
        }
    };
}

From source file: org.godhuli.rhipe.RHSequenceAsTextOutputFormat.java

License: Apache License

public RecordWriter<RHBytesWritable, RHBytesWritable> getRecordWriter(final TaskAttemptContext context)
        throws IOException, InterruptedException {
    final Configuration conf = context.getConfiguration();
    CompressionCodec codec = null;
    CompressionType compressionType = CompressionType.NONE;
    String squote = conf.get("rhipe_string_quote");
    if (squote == null) {
        squote = "";
    }
    if (getCompressOutput(context)) {
        compressionType = getOutputCompressionType(context);
        final Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
        codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
    }
    final Path file = getDefaultWorkFile(context, "");
    final FileSystem fs = file.getFileSystem(conf);
    final SequenceFile.Writer out = SequenceFile.createWriter(fs, conf, file, org.apache.hadoop.io.Text.class,
            org.apache.hadoop.io.Text.class, compressionType, codec, context);
    return new ElementWriter(out, squote);
}

From source file: org.sf.xrime.algorithms.BC.BCAlgorithm.java

License: Apache License

private void insertInitNode(String fileName) throws IOException {
    if (initVertex != null) {
        Path filePath = new Path(context.getSource().getPaths().get(0).toString() + "/" + fileName);
        Path path = new Path(jobConf.getWorkingDirectory(), filePath);

        FileSystem fs = path.getFileSystem(jobConf);
        CompressionCodec codec = null;
        CompressionType compressionType = CompressionType.NONE;
        if (jobConf.getBoolean("mapred.output.compress", false)) {
            // find the kind of compression to do            
            String val = jobConf.get("mapred.output.compression.type", CompressionType.RECORD.toString());
            compressionType = CompressionType.valueOf(val);

            // find the right codec
            Class<? extends CompressionCodec> codecClass = DefaultCodec.class;

            String name = jobConf.get("mapred.output.compression.codec");
            if (name != null) {
                try {
                    codecClass = jobConf.getClassByName(name).asSubclass(CompressionCodec.class);
                } catch (ClassNotFoundException e) {
                    throw new IllegalArgumentException("Compression codec " + name + " was not found.", e);
                }
            }
            codec = ReflectionUtils.newInstance(codecClass, jobConf);
        }

        SequenceFile.Writer out = SequenceFile.createWriter(fs, jobConf, path, Text.class,
                LabeledAdjBiSetVertex.class, compressionType, codec, null);

        BCLabel nowLabel = new BCLabel();

        nowLabel.setStatus(-1);

        initVertex.setLabel(BCLabel.bcLabelPathsKey, nowLabel);

        out.append(new Text(initVertex.getId()), initVertex);
        out.close();
    }
}
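
This example, and the remaining org.sf.xrime ones below, parse the mapred.output.compress* properties by hand. A hedged sketch of the same lookup via the old-API helpers, which read exactly these properties, assuming a JobConf is at hand and generic Text key/value types stand in for the project-specific vertex classes:

import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;

public class CompressionLookupSketch {
    static SequenceFile.Writer openWriter(FileSystem fs, Path path, JobConf jobConf) throws IOException {
        CompressionType compressionType = CompressionType.NONE;
        CompressionCodec codec = null;
        if (FileOutputFormat.getCompressOutput(jobConf)) {
            // Reads "mapred.output.compression.type", defaulting to RECORD.
            compressionType = SequenceFileOutputFormat.getOutputCompressionType(jobConf);
            // Reads "mapred.output.compression.codec", defaulting to DefaultCodec.
            Class<? extends CompressionCodec> codecClass =
                    FileOutputFormat.getOutputCompressorClass(jobConf, DefaultCodec.class);
            codec = ReflectionUtils.newInstance(codecClass, jobConf);
        }
        return SequenceFile.createWriter(fs, jobConf, path, Text.class, Text.class,
                compressionType, codec, null);
    }
}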

From source file: org.sf.xrime.algorithms.BCApproximation.BCAlgorithm.java

License: Apache License

private void insertInitNode(String fileName) throws IOException {
    if (initVertex != null) {
        // get the path of the Init output file
        Path filePath = new Path(context.getSource().getPaths().get(0).toString() + "/" + fileName);
        Path path = new Path(jobConf.getWorkingDirectory(), filePath);

        FileSystem fs = path.getFileSystem(jobConf);
        CompressionCodec codec = null;
        CompressionType compressionType = CompressionType.NONE;
        if (jobConf.getBoolean("mapred.output.compress", false)) {
            // find the kind of compression to do            
            String val = jobConf.get("mapred.output.compression.type", CompressionType.RECORD.toString());
            compressionType = CompressionType.valueOf(val);

            // find the right codec
            Class<? extends CompressionCodec> codecClass = DefaultCodec.class;

            String name = jobConf.get("mapred.output.compression.codec");
            if (name != null) {
                try {
                    codecClass = jobConf.getClassByName(name).asSubclass(CompressionCodec.class);
                } catch (ClassNotFoundException e) {
                    throw new IllegalArgumentException("Compression codec " + name + " was not found.", e);
                }
            }
            codec = ReflectionUtils.newInstance(codecClass, jobConf);
        }

        SequenceFile.Writer out = SequenceFile.createWriter(fs, jobConf, path, Text.class,
                LabeledAdjBiSetVertex.class, compressionType, codec, null);

        BCLabel nowLabel = new BCLabel();

        nowLabel.setStatus(-1);

        initVertex.setLabel(BCLabel.bcLabelPathsKey, nowLabel);

        out.append(new Text(initVertex.getId()), initVertex);
        out.close();
    }
}

From source file: org.sf.xrime.algorithms.BFS.alg_1.BFSAlgorithm.java

License: Apache License

/**
 * Insert initial vertex. We will store this vertex in a SequenceFile.
 *
 * @param fileName
 *            SequenceFile name to store the vertex.
 * @throws IOException
 */
private void insertInitNode(String fileName) throws IOException {
    if (initVertex != null) {
        BFSLabel nowLabel = (BFSLabel) initVertex.getLabel(BFSLabel.bfsLabelPathsKey);

        if (nowLabel == null) {
            nowLabel = new BFSLabel();
        }

        nowLabel.setStatus(-1);
        initVertex.setLabel(BFSLabel.bfsLabelPathsKey, nowLabel);

        // get the path of the Init output file
        Path filePath = new Path(context.getSource().getPaths().get(0).toString() + "/" + fileName);

        Path path = new Path(jobConf.getWorkingDirectory(), filePath);
        FileSystem fs = path.getFileSystem(jobConf);
        CompressionCodec codec = null;
        CompressionType compressionType = CompressionType.NONE;
        if (jobConf.getBoolean("mapred.output.compress", false)) {
            // find the kind of compression to do
            String val = jobConf.get("mapred.output.compression.type", CompressionType.RECORD.toString());
            compressionType = CompressionType.valueOf(val);

            // find the right codec
            Class<? extends CompressionCodec> codecClass = DefaultCodec.class;

            String name = jobConf.get("mapred.output.compression.codec");
            if (name != null) {
                try {
                    codecClass = jobConf.getClassByName(name).asSubclass(CompressionCodec.class);
                } catch (ClassNotFoundException e) {
                    throw new IllegalArgumentException("Compression codec " + name + " was not found.", e);
                }
            }
            codec = ReflectionUtils.newInstance(codecClass, jobConf);
        }

        SequenceFile.Writer out = SequenceFile.createWriter(fs, jobConf, path, Text.class,
                LabeledAdjVertex.class, compressionType, codec, null);

        out.append(new Text(initVertex.getId()), initVertex);
        out.close();
    }
}

From source file: org.sf.xrime.algorithms.layout.radialtree.RadialTreeAlgorithm.java

License: Apache License

/**
 * Insert initial vertex. We will store this vertex in a SequenceFile.
 *
 * @param fileName
 *          SequenceFile name to store the vertex.
 * @throws IOException
 */
private void insertInitNode(String fileName) throws IOException {
    if (initVertex != null) {
        RadialTreeLabel nowLabel = (RadialTreeLabel) initVertex
                .getLabel(RadialTreeLabel.RadialTreeLabelPathsKey);

        if (nowLabel == null) {
            nowLabel = new RadialTreeLabel();
        }

        nowLabel.setStatus(-1);
        initVertex.setLabel(RadialTreeLabel.RadialTreeLabelPathsKey, nowLabel);

        // get the path of the Init output file
        Path filePath = new Path(context.getSource().getPaths().get(0).toString() + "/" + fileName);

        Path path = new Path(jobConf.getWorkingDirectory(), filePath);
        FileSystem fs = path.getFileSystem(jobConf);
        CompressionCodec codec = null;
        CompressionType compressionType = CompressionType.NONE;
        if (jobConf.getBoolean("mapred.output.compress", false)) {
            // find the kind of compression to do
            String val = jobConf.get("mapred.output.compression.type", CompressionType.RECORD.toString());
            compressionType = CompressionType.valueOf(val);

            // find the right codec
            Class<? extends CompressionCodec> codecClass = DefaultCodec.class;

            String name = jobConf.get("mapred.output.compression.codec");
            if (name != null) {
                try {
                    codecClass = jobConf.getClassByName(name).asSubclass(CompressionCodec.class);
                } catch (ClassNotFoundException e) {
                    throw new IllegalArgumentException("Compression codec " + name + " was not found.", e);
                }
            }
            codec = ReflectionUtils.newInstance(codecClass, jobConf);
        }

        SequenceFile.Writer out = SequenceFile.createWriter(fs, jobConf, path, Text.class,
                LabeledAdjVertex.class, compressionType, codec, null);

        out.append(new Text(initVertex.getId()), initVertex);
        out.close();
    }
}

From source file: org.sf.xrime.algorithms.setBFS.SetBFSAlgorithm.java

License: Apache License

/**
 * Insert initial vertexes. We will store these vertexes in a SequenceFile.
 * @param fileName SequenceFile name to store the vertexes.
 * @throws IOException
 */
private void insertInitNode(String fileName) throws IOException {
    if (initVertexs != null) {
        // get the path of the Init output file
        Path filePath = new Path(context.getSource().getPaths().get(0).toString() + "/" + fileName);

        Path path = new Path(jobConf.getWorkingDirectory(), filePath);
        FileSystem fs = path.getFileSystem(jobConf);
        CompressionCodec codec = null;
        CompressionType compressionType = CompressionType.NONE;
        if (jobConf.getBoolean("mapred.output.compress", false)) {
            // find the kind of compression to do            
            String val = jobConf.get("mapred.output.compression.type", CompressionType.RECORD.toString());
            compressionType = CompressionType.valueOf(val);

            // find the right codec
            Class<? extends CompressionCodec> codecClass = DefaultCodec.class;

            String name = jobConf.get("mapred.output.compression.codec");
            if (name != null) {
                try {
                    codecClass = jobConf.getClassByName(name).asSubclass(CompressionCodec.class);
                } catch (ClassNotFoundException e) {
                    throw new IllegalArgumentException("Compression codec " + name + " was not found.", e);
                }
            }
            codec = ReflectionUtils.newInstance(codecClass, jobConf);
        }

        SequenceFile.Writer out = SequenceFile.createWriter(fs, jobConf, path, Text.class,
                LabeledAdjSetVertex.class, compressionType, codec, null);

        for (LabeledAdjSetVertex initVertex : initVertexs) {
            SetBFSLabel nowLabel = (SetBFSLabel) initVertex.getLabel(SetBFSLabel.setBFSLabelPathsKey);

            if (nowLabel == null) {
                nowLabel = new SetBFSLabel();
            }

            nowLabel.setStatus(-1);
            nowLabel.setDistance(0);
            initVertex.setLabel(SetBFSLabel.setBFSLabelPathsKey, nowLabel);

            out.append(new Text(initVertex.getId()), initVertex);
        }

        out.close();
    }
}
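
Finally, since the overload documented on this page is marked @Deprecated, here is a hedged sketch of the option-based replacement available in Hadoop 2 and later; the path and key/value classes are again placeholders.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.DefaultCodec;

public class OptionBasedWriterSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Writer.Option factory methods replace the positional arguments
        // of the deprecated overloads; the path is a placeholder.
        SequenceFile.Writer out = SequenceFile.createWriter(conf,
                SequenceFile.Writer.file(new Path("/tmp/example.seq")),
                SequenceFile.Writer.keyClass(Text.class),
                SequenceFile.Writer.valueClass(IntWritable.class),
                SequenceFile.Writer.compression(CompressionType.BLOCK, new DefaultCodec()));
        try {
            out.append(new Text("key"), new IntWritable(1));
        } finally {
            out.close();
        }
    }
}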