Example usage for org.apache.hadoop.io.compress CompressionCodecFactory getCodecClasses

List of usage examples for org.apache.hadoop.io.compress CompressionCodecFactory getCodecClasses

Introduction

In this page you can find the example usage for org.apache.hadoop.io.compress CompressionCodecFactory getCodecClasses.

Prototype

public static List<Class<? extends CompressionCodec>> getCodecClasses(Configuration conf) 

Source Link

Document

Get the list of codecs discovered via a Java ServiceLoader, or listed in the configuration.

Usage

From source file:com.cloudera.flume.handlers.hdfs.CustomDfsSink.java

License:Apache License

/**
 * Looks up a Hadoop {@link CompressionCodec} by name among the codecs discovered
 * from the Flume configuration, instantiates it, and configures it if possible.
 *
 * @param conf      configuration applied to the codec when it is {@link Configurable}
 * @param codecName codec name to match (see {@code codecMatches}); "None" (any case)
 *                  means no compression and yields a {@code null} return
 * @return the instantiated codec, or {@code null} when codecName is "None"
 * @throws IllegalArgumentException if codecName is neither "None" nor a known codec
 */
public static CompressionCodec getCodec(Configuration conf, String codecName) {
    // NOTE(review): codec discovery uses FlumeConfiguration.get(), not the supplied
    // conf (which is only used for setConf below) — confirm this asymmetry is intended.
    List<Class<? extends CompressionCodec>> codecs = CompressionCodecFactory
            .getCodecClasses(FlumeConfiguration.get());
    // Wish we could base this on DefaultCodec but appears not all codecs
    // extend DefaultCodec (Lzo).
    CompressionCodec codec = null;
    ArrayList<String> codecStrs = new ArrayList<String>();
    codecStrs.add("None");
    for (Class<? extends CompressionCodec> cls : codecs) {
        codecStrs.add(cls.getSimpleName());

        if (codecMatches(cls, codecName)) {
            try {
                codec = cls.newInstance();
            } catch (InstantiationException e) {
                // Pass the exception so the root cause is preserved in the log.
                LOG.error("Unable to instantiate " + cls + " class", e);
            } catch (IllegalAccessException e) {
                LOG.error("Unable to access " + cls + " class", e);
            }
        }
    }

    if (codec == null) {
        if (!codecName.equalsIgnoreCase("None")) {
            throw new IllegalArgumentException(
                    "Unsupported compression codec " + codecName + ".  Please choose from: " + codecStrs);
        }
    } else if (codec instanceof Configurable) {
        // Must check instanceof codec as BZip2Codec doesn't inherit Configurable
        // Must set the configuration for Configurable objects that may or do use
        // native libs
        ((Configurable) codec).setConf(conf);
    }
    return codec;
}

From source file:com.cloudera.flume.handlers.hdfs.TestEscapedCustomOutputDfs.java

License:Apache License

/** Logs the compression codec classes discovered for the current Flume configuration. */
public void testCodecs() {
    List<Class<? extends CompressionCodec>> discovered =
            CompressionCodecFactory.getCodecClasses(FlumeConfiguration.get());
    LOG.info(discovered);
}

From source file:com.facebook.hiveio.common.HadoopNative.java

License:Apache License

/** Load native libraries */
/**
 * Loads the Hadoop native library exactly once and patches Hadoop internals via
 * reflection so native codecs work in this codebase.
 *
 * Idempotent: returns immediately if already loaded; rethrows the recorded error
 * if a previous attempt failed, so callers never see a half-initialized state.
 * Thread-safety comes from the {@code synchronized} keyword on this method.
 */
public static synchronized void requireHadoopNative() {
    if (LOADED) {
        return; // already initialized by an earlier call
    }
    if (ERROR != null) {
        // A previous attempt failed; fail fast with the original cause.
        throw new RuntimeException("failed to load Hadoop native library", ERROR);
    }
    try {
        loadLibrary("hadoop");
        // Force NativeCodeLoader to report the library as loaded, since we
        // loaded it ourselves rather than through Hadoop's own path.
        Field field = NativeCodeLoader.class.getDeclaredField("nativeCodeLoaded");
        field.setAccessible(true);
        field.set(null, true);

        // Use reflection to HACK fix caching bug in CodecPool. This hack works
        // from a concurrency perspective in this codebase because we assume that
        // all threads that will access the CodecPool will have already been
        // synchronized by calling requireHadoopNative() at some point.
        field = CodecPool.class.getDeclaredField("COMPRESSOR_POOL");
        setFinalStatic(field, new HackListMap());
        field = CodecPool.class.getDeclaredField("DECOMPRESSOR_POOL");
        setFinalStatic(field, new HackListMap());

        // Trigger codec discovery now (and log it) so failures surface here,
        // inside this guarded initialization, rather than at first use.
        List<Class<? extends CompressionCodec>> codecs = CompressionCodecFactory
                .getCodecClasses(new Configuration());
        LOG.info("Compression Codecs: {}", codecs);

        // Set LOADED last: everything above must have succeeded first.
        LOADED = true;
        // CHECKSTYLE: stop IllegalCatch
    } catch (Throwable t) {
        // CHECKSTYLE: resume IllegalCatch
        // Record the failure so subsequent calls rethrow instead of retrying.
        ERROR = t;
        throw new RuntimeException("failed to load Hadoop native library", ERROR);
    }
}

From source file:com.vf.flume.sink.hdfs.HDFSEventSink.java

License:Apache License

/**
 * Resolves a Hadoop {@link CompressionCodec} by name from the codecs discovered
 * in a fresh {@link Configuration}, instantiating and configuring it.
 *
 * @param codecName codec name to match (see {@code codecMatches}); "None" (any case)
 *                  means no compression and yields a {@code null} return
 * @return the instantiated codec, or {@code null} when codecName is "None"
 * @throws IllegalArgumentException if codecName is neither "None" nor a known codec
 */
@VisibleForTesting
static CompressionCodec getCodec(String codecName) {
    Configuration conf = new Configuration();
    List<Class<? extends CompressionCodec>> codecs = CompressionCodecFactory.getCodecClasses(conf);
    // Wish we could base this on DefaultCodec but appears not all codecs
    // extend DefaultCodec (Lzo).
    CompressionCodec codec = null;
    ArrayList<String> codecStrs = new ArrayList<String>();
    codecStrs.add("None");
    for (Class<? extends CompressionCodec> cls : codecs) {
        codecStrs.add(cls.getSimpleName());
        if (codecMatches(cls, codecName)) {
            try {
                codec = cls.newInstance();
            } catch (InstantiationException e) {
                // Pass the exception so the root cause is preserved in the log.
                LOG.error("Unable to instantiate " + cls + " class", e);
            } catch (IllegalAccessException e) {
                LOG.error("Unable to access " + cls + " class", e);
            }
        }
    }

    if (codec == null) {
        if (!codecName.equalsIgnoreCase("None")) {
            throw new IllegalArgumentException(
                    "Unsupported compression codec " + codecName + ".  Please choose from: " + codecStrs);
        }
    } else if (codec instanceof org.apache.hadoop.conf.Configurable) {
        // Must check instanceof codec as BZip2Codec doesn't inherit Configurable
        // Must set the configuration for Configurable objects that may or do use
        // native libs
        ((org.apache.hadoop.conf.Configurable) codec).setConf(conf);
    }
    return codec;
}

From source file:name.abhijitsarkar.hadoop.io.IOUtils.java

License:Open Source License

/**
 * Finds a compression codec by its fully-qualified class name among the codecs
 * Hadoop discovers from the given configuration.
 *
 * @param className fully-qualified codec class name to look up
 * @param conf      configuration used for discovery and codec instantiation
 * @return a configured codec instance, or {@code null} if no class name matches
 */
private static CompressionCodec getCodecByClassName(final String className, final Configuration conf)
        throws InstantiationException, IllegalAccessException {
    final List<Class<? extends CompressionCodec>> allCodecClasses = CompressionCodecFactory.getCodecClasses(conf);
    for (final Class<? extends CompressionCodec> candidate : allCodecClasses) {
        if (candidate.getName().equals(className)) {
            // Return on first match; ReflectionUtils also applies conf to the instance.
            return ReflectionUtils.newInstance(candidate, conf);
        }
    }
    return null;
}

From source file:org.apache.sqoop.io.CodecMap.java

License:Apache License

/**
 * Find the relevant compression codec for the codec's canonical class name
 * or by codec alias.
 * <p>
 * Codec aliases are case insensitive.
 * <p>
 * The codec alias is the short class name (without the package name).
 * If the short class name ends with 'Codec', then there are two aliases for
 * the codec, the complete short class name and the short class name without
 * the 'Codec' ending. For example for the 'GzipCodec' codec class name the
 * aliases are 'gzip' and 'gzipcodec'.
 * <p>
 * Note: When HADOOP-7323 is available this method can be replaced with a call
 * to CompressionCodecFactory.
 * @param codecName the canonical class name of the codec or the codec alias
 * @return the codec object or null if none matching the name were found
 */
private static CompressionCodec getCodecByName(String codecName, Configuration conf) {
    for (Class<? extends CompressionCodec> codecClass : CompressionCodecFactory.getCodecClasses(conf)) {
        if (codecMatches(codecClass, codecName)) {
            return ReflectionUtils.newInstance(codecClass, conf);
        }
    }
    return null;
}

From source file:org.springframework.data.hadoop.serialization.HdfsWriterTest.java

License:Apache License

/**
 * Test compressed write of source file against ALL codecs supported by Hadoop.
 */
@Test
public void testCompressedWriteUsingHadoopCodecs() {
    // Might be re-worked to support parameterized tests.
    // See @Parameterized and Parameterized.Parameters

    hdfsOutputDir += "hadoop-codecs/";

    final StringBuilder exceptions = new StringBuilder();

    // Get a list of all codecs supported by Hadoop
    final List<Class<? extends CompressionCodec>> codecClasses =
            CompressionCodecFactory.getCodecClasses(configuration);

    for (Class<? extends CompressionCodec> codecClass : codecClasses) {
        try {
            // Instantiation stays inside the try: codec construction failures are
            // collected the same way as write failures.
            testResourceWrite(RESOURCE_FORMAT, 1,
                    ReflectionUtils.newInstance(codecClass, configuration), /* useCodecAlias */
                    true);
        } catch (Exception exc) {
            exceptions.append(codecClass.getName())
                    .append(" not supported. Details: ")
                    .append(exc.getMessage())
                    .append("\n");
        }
    }

    assertTrue(exceptions.toString(), exceptions.length() == 0);
}