Example usage for org.apache.hadoop.conf Configuration toString

Introduction

On this page you can find example usages of org.apache.hadoop.conf.Configuration.toString().

Prototype

@Override
public String toString()

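Before the source-file examples below, here is a minimal sketch of calling toString() directly. The added resource path is hypothetical, and the output shown in the comment is an assumption based on typical Hadoop behavior, where toString() lists the loaded configuration resources; the exact text depends on the Hadoop version and classpath.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class ConfigurationToStringSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Add an extra resource so it shows up in toString(); the path is illustrative only.
        conf.addResource(new Path("/etc/hadoop/conf/example-site.xml"));

        // In typical Hadoop versions this prints something like:
        //   Configuration: core-default.xml, core-site.xml, /etc/hadoop/conf/example-site.xml
        System.out.println(conf.toString());
    }
}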

Usage

From source file:com.asp.tranlog.ImportTsv.java

License:Apache License

/**
 * Main entry point.
 * 
 * @param args
 *            The command line parameters.
 * @throws Exception
 *             When running the job fails.
 */
public static void main(String[] args) throws Exception {
    System.out.println("==============================================");
    Configuration conf = HBaseConfiguration.create();

    LOG.error(PRE + "conf.toString() == " + conf.toString());

    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length < 2) {
        usage("Wrong number of arguments: " + otherArgs.length);
        System.exit(-1);
    }

    String[] columns = conf.getStrings(COLUMNS_CONF_KEY);

    if (columns == null) {
        usage("No columns specified. Please specify with -D" + COLUMNS_CONF_KEY + "=...");
        System.exit(-1);
    }

    // Make sure at least one column in addition to the row key is specified
    if (columns.length < 2) {
        usage("One or more columns in addition to the row key are required");
        System.exit(-1);
    }
    columns = conf.getStrings(COLUMNS_CONF_KEY);
    if (columns == null) {
        usage("One or more key columns are required");
        System.exit(-1);
    }

    Job job = createSubmittableJob(conf, otherArgs);

    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:com.ds.lzo.DeprecatedLzoLineRecordReaderForCombined.java

License:Open Source License

public DeprecatedLzoLineRecordReaderForCombined(Configuration conf, FileSplit split) throws IOException {
    LOG.warn("split start: " + split.getStart());
    LOG.warn("split length: " + split.getLength());
    String[] locs = split.getLocations();
    for (String loc : locs) {
        LOG.warn("location: " + loc);
    }
    start = split.getStart();
    end = start + split.getLength();
    LOG.warn("split end: " + end);
    final Path file = split.getPath();
    LOG.warn("file: " + file.getName());
    LOG.warn("INT split start: " + (int) split.getStart());
    LOG.warn("INT split length: " + (int) split.getLength());
    LOG.warn("INT split end: " + (int) end);

    FileSystem fs = file.getFileSystem(conf);
    codecFactory = new CompressionCodecFactory(conf);
    final CompressionCodec codec = codecFactory.getCodec(file);
    LOG.warn("codec: " + codec.toString());
    LOG.warn("config: " + conf.toString());
    if (codec == null) {
        throw new IOException("No LZO codec found, cannot run.");
    }

    // Open the file and seek to the next split.
    fileIn = fs.open(file);
    // Create input stream and read the file header.
    in = new LineReader(codec.createInputStream(fileIn), conf);
    if (start != 0) {
        fileIn.seek(start);
        LOG.warn("fileIn position: " + fileIn.getPos());
        LOG.warn("buffer size: " + conf.get("io.file.buffer.size"));

        // Read and ignore the first line.
        in.readLine(new Text());
        start = fileIn.getPos();
    }

    pos = start;
}

From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.TestElasticsearchHiveUtils.java

License:Apache License

@Test
public void test_getHiveConfiguration() throws IOException {

    //(this is a pretty minimal test)

    {
        final GlobalPropertiesBean globals = BeanTemplateUtils.build(GlobalPropertiesBean.class).done().get();

        try {
            ElasticsearchHiveUtils.getHiveConfiguration(globals);
            fail("Should have errored");
        } catch (Exception e) {
        } //success
    }

    {
        final String tmp_dir = System.getProperty("java.io.tmpdir");
        final File dummy_config = new File(tmp_dir + "/hive-site.xml");
        if (!dummy_config.exists()) {
            System.out.println("CREATED file: " + dummy_config.toString());
            dummy_config.createNewFile();
        }
        final GlobalPropertiesBean globals = BeanTemplateUtils.build(GlobalPropertiesBean.class)
                .with(GlobalPropertiesBean::local_yarn_config_dir, tmp_dir).done().get();

        final Configuration config = ElasticsearchHiveUtils.getHiveConfiguration(globals);

        assertTrue("config should contain: " + dummy_config.toString() + " vs " + config.toString(),
                config.toString().contains("/hive-site.xml"));
    }
}

From source file:com.koda.integ.hbase.test.ConfigHelperTest.java

License:Open Source License

public void testConfigCopy() {
    LOG.info("Test config copy started");
    Configuration cfg1 = new Configuration();

    cfg1.set("Key1", "Value1");
    cfg1.set("Key2", "Value2");
    cfg1.set("Key3", "Value3");

    Configuration cfg2 = ConfigHelper.copy(cfg1);
    assertTrue(cfg1.toString().equals(cfg2.toString()));
    LOG.info("Test config copy finished OK");

}
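
Note that in typical Hadoop versions Configuration.toString() reflects the list of loaded resources rather than individual key/value pairs, so the assertion above is a fairly coarse equality check. As a complementary sketch (not part of the original test, and written under that assumption), the copied properties themselves could be compared entry by entry, since Configuration is iterable over its key/value pairs:

import java.util.Map;

import org.apache.hadoop.conf.Configuration;

// Minimal sketch of a property-level comparison between two Configurations.
public class ConfigCompareSketch {

    static boolean samePropertiesRaw(Configuration a, Configuration b) {
        // Configuration is Iterable<Map.Entry<String, String>>, so each raw
        // key/value pair in 'a' can be checked against 'b', and vice versa.
        for (Map.Entry<String, String> e : a) {
            if (!e.getValue().equals(b.getRaw(e.getKey()))) {
                return false;
            }
        }
        for (Map.Entry<String, String> e : b) {
            if (!e.getValue().equals(a.getRaw(e.getKey()))) {
                return false;
            }
        }
        return true;
    }
}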

From source file:com.linkedin.drelephant.DrElephant.java

License:Apache License

public DrElephant() throws IOException {
    HDFSContext.load();
    Configuration configuration = ElephantContext.instance().getAutoTuningConf();
    autoTuningEnabled = configuration.getBoolean(AUTO_TUNING_ENABLED, false);
    logger.debug("Auto Tuning Configuration: " + configuration.toString());
    _elephant = new ElephantRunner();
    if (autoTuningEnabled) {
        _autoTuner = new AutoTuner();
        _autoTunerThread = new Thread(_autoTuner, "Auto Tuner Thread");
    }
}

From source file:com.teradata.adsbserde.AdsbSerDe.java

@Override
public void initialize(Configuration c, Properties tbl) throws SerDeException {
    MyLogger.println("AsdbSerDe called ... ");
    MyLogger.println("c: " + c.toString());
    MyLogger.println("tbl: " + tbl.toString());

    System.out.println(PROP_DEBUG_ENABLED + "=" + tbl.getProperty(PROP_DEBUG_ENABLED));
    MyLogger.enabled = "true".equalsIgnoreCase(tbl.getProperty(PROP_DEBUG_ENABLED));

    // Generate the regular expression used to extract the ADSB data pairs.
    // This is being done here in the anticipation that one day, parts of this
    // e.g. the separator character - might be accepted as a property of the
    // table definition.
    pairs = Pattern.compile("([\\S^]+)\\s+([\\S$]+)");

    String colNamesStr = tbl.getProperty(serdeConstants.LIST_COLUMNS);
    List<String> colNamesWrk = Arrays.asList(colNamesStr.split(","));
    colNames = new LinkedList<>();
    for (String name : colNamesWrk) {
        if (name != null) {
            colNames.add(name.toLowerCase());
        } else {
            colNames.add(name);
        }
    }

    MyLogger.println("colNames: " + colNamesStr);

    // Get a list of TypeInfos for the columns. This list lines up with 
    // the list of column names.
    String colTypesStr = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
    List<TypeInfo> colTypes = TypeInfoUtils.getTypeInfosFromTypeString(colTypesStr);
    MyLogger.println("colTypes: " + colTypesStr);

    rowTypeInfo = (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(colNames, colTypes);
    MyLogger.println("rowTypeInfo: " + rowTypeInfo);
    MyLogger.println("clock rowTypeInfo: " + rowTypeInfo.getStructFieldTypeInfo("clock"));
    MyLogger.println("speed rowTypeInfo: " + rowTypeInfo.getStructFieldTypeInfo("speed"));

    rowOI = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(rowTypeInfo);
    MyLogger.println("rowOI: " + rowOI);

}

From source file:com.thinkbiganalytics.datalake.authorization.hdfs.HDFSUtil.java

License:Apache License

/**
 * @param configResources comma-separated list of Hadoop configuration resource paths
 * @return the assembled Configuration
 * @throws IOException if no configuration resources can be found
 */
public static Configuration getConfigurationFromResources(String configResources) throws IOException {
    boolean foundResources = false;
    final Configuration config = new Configuration();
    if (null != configResources) {
        String[] resources = configResources.split(",");
        for (String resource : resources) {
            config.addResource(new Path(resource.trim()));
            foundResources = true;
        }
    }

    if (!foundResources) {
        // check that at least 1 non-default resource is available on the classpath
        String configStr = config.toString();
        for (String resource : configStr.substring(configStr.indexOf(':') + 1).split(",")) {
            if (!resource.contains("default") && config.getResource(resource.trim()) != null) {
                foundResources = true;
                break;
            }
        }
    }

    if (!foundResources) {
        throw new IOException("Could not find any of the " + "hadoop conf" + " on the classpath");
    }
    return config;
}

From source file:com.thinkbiganalytics.nifi.security.ApplySecurityPolicy.java

License:Apache License

public static Configuration getConfigurationFromResources(String configResources) throws IOException {
    boolean foundResources = false;
    final Configuration config = new Configuration();
    if (null != configResources) {
        String[] resources = configResources.split(",");
        for (String resource : resources) {
            config.addResource(new Path(resource.trim()));
            foundResources = true;
        }
    }

    if (!foundResources) {
        // check that at least 1 non-default resource is available on the classpath
        String configStr = config.toString();
        for (String resource : configStr.substring(configStr.indexOf(":") + 1).split(",")) {
            if (!resource.contains("default") && config.getResource(resource.trim()) != null) {
                foundResources = true;
                break;
            }
        }
    }

    if (!foundResources) {
        throw new IOException("Could not find any of the " + "hadoop conf" + " on the classpath");
    }
    return config;
}

From source file:com.thinkbiganalytics.nifi.v2.hdfs.AbstractHadoopProcessor.java

License:Apache License

private static Configuration getConfigurationFromResources(String configResources) throws IOException {
    boolean foundResources = false;
    final Configuration config = new Configuration();
    if (null != configResources) {
        String[] resources = configResources.split(",");
        for (String resource : resources) {
            config.addResource(new Path(resource.trim()));
            foundResources = true;
        }
    }

    if (!foundResources) {
        // check that at least 1 non-default resource is available on the classpath
        String configStr = config.toString();
        for (String resource : configStr.substring(configStr.indexOf(":") + 1).split(",")) {
            if (!resource.contains("default") && config.getResource(resource.trim()) != null) {
                foundResources = true;
                break;
            }
        }
    }

    if (!foundResources) {
        throw new IOException(
                "Could not find any of the " + HADOOP_CONFIGURATION_RESOURCES.getName() + " on the classpath");
    }
    return config;
}

From source file:com.thinkbiganalytics.nifi.v2.hdfs.AbstractHadoopProcessor.java

License:Apache License

/**
 * Reset Hadoop Configuration and FileSystem based on the supplied configuration resources.
 *
 * @param configResources for configuration
 * @param dir             the target directory
 * @param context         for context, which gives access to the principal
 * @return An HdfsResources object
 * @throws IOException if unable to access HDFS
 */
HdfsResources resetHDFSResources(String configResources, String dir, ProcessContext context)
        throws IOException {
    // org.apache.hadoop.conf.Configuration saves its current thread context class loader to use for threads that it creates
    // later to do I/O. We need this class loader to be the NarClassLoader instead of the magical
    // NarThreadContextClassLoader.
    ClassLoader savedClassLoader = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());

    try {
        Configuration config = getConfigurationFromResources(configResources);

        // first check for timeout on HDFS connection, because FileSystem has a hard coded 15 minute timeout
        checkHdfsUriForTimeout(config);

        // disable caching of Configuration and FileSystem objects, else we cannot reconfigure the processor without a complete
        // restart
        String disableCacheName = String.format("fs.%s.impl.disable.cache",
                FileSystem.getDefaultUri(config).getScheme());
        config.set(disableCacheName, "true");

        // If kerberos is enabled, create the file system as the kerberos principal
        // -- use RESOURCES_LOCK to guarantee UserGroupInformation is accessed by only a single thread at a time
        FileSystem fs = null;
        UserGroupInformation ugi = null;
        synchronized (RESOURCES_LOCK) {
            if (config.get("hadoop.security.authentication").equalsIgnoreCase("kerberos")) {
                String principal = context.getProperty(kerberosPrincipal).getValue();
                String keyTab = context.getProperty(kerberosKeytab).getValue();
                UserGroupInformation.setConfiguration(config);
                ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keyTab);
                modifyConfig(context, config);
                fs = getFileSystemAsUser(config, ugi);
                lastKerberosReloginTime = System.currentTimeMillis() / 1000;
            } else {
                config.set("ipc.client.fallback-to-simple-auth-allowed", "true");
                config.set("hadoop.security.authentication", "simple");
                modifyConfig(context, config);
                fs = getFileSystem(config);
            }
        }
        getLog().info(
                "Initialized a new HDFS File System with working dir: {} default block size: {} default replication: {} config: {}",
                new Object[] { fs.getWorkingDirectory(), fs.getDefaultBlockSize(new Path(dir)),
                        fs.getDefaultReplication(new Path(dir)), config.toString() });
        return new HdfsResources(config, fs, ugi);
    } finally {
        Thread.currentThread().setContextClassLoader(savedClassLoader);
    }
}