Example usage for org.apache.hadoop.conf Configuration addResource

Introduction

This page collects example usages of org.apache.hadoop.conf.Configuration.addResource.

Prototype

public void addResource(InputStream in, String name) 

Document

Add a configuration resource.
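
Below is a minimal sketch of calling this overload on a local XML file; the file name my-site.xml and the property key some.property are hypothetical, not taken from the examples below. Note that the examples on this page typically construct the Configuration with loadDefaults=false, so only the explicitly added resources contribute properties.

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.hadoop.conf.Configuration;

public class AddResourceExample {
    public static void main(String[] args) throws Exception {
        // false: skip core-default.xml/core-site.xml and start empty
        Configuration conf = new Configuration(false);
        // Hypothetical local file; any XML configuration stream works
        try (InputStream in = Files.newInputStream(Paths.get("my-site.xml"))) {
            // The name argument identifies the resource, e.g. in error messages
            conf.addResource(in, "my-site.xml");
            // Read a property while the stream is still open;
            // resources are loaded lazily on first access
            System.out.println(conf.get("some.property"));
        }
    }
}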

Usage

From source file: com.linkedin.drelephant.mapreduce.fetchers.MapReduceFSFetcherHadoop2.java

License: Apache License
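This example opens a job's configuration file directly from the file system, loads it into an empty Configuration, and copies the entries into a java.util.Properties object for further analysis: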

@Override
public MapReduceApplicationData fetchData(AnalyticJob job) throws IOException {
    DataFiles files = getHistoryFiles(job);
    String confFile = files.getJobConfPath();
    String histFile = files.getJobHistPath();
    String appId = job.getAppId();
    String jobId = Utils.getJobIdFromApplicationId(appId);

    MapReduceApplicationData jobData = new MapReduceApplicationData();
    jobData.setAppId(appId).setJobId(jobId);

    // Fetch job config
    Configuration jobConf = new Configuration(false);
    jobConf.addResource(_fs.open(new Path(confFile)), confFile);
    Properties jobConfProperties = new Properties();
    for (Map.Entry<String, String> entry : jobConf) {
        jobConfProperties.put(entry.getKey(), entry.getValue());
    }
    jobData.setJobConf(jobConfProperties);

    // Check if job history file is too large and should be throttled
    if (_fs.getFileStatus(new Path(histFile)).getLen() > _maxLogSizeInMB * FileUtils.ONE_MB) {
        String errMsg = "The history log of MapReduce application: " + appId + " is over the limit size of "
                + _maxLogSizeInMB + " MB, the parsing process gets throttled.";
        logger.warn(errMsg);
        jobData.setDiagnosticInfo(errMsg);
        jobData.setSucceeded(false); // set succeeded to false to avoid heuristic analysis
        return jobData;
    }

    // Analyze job history file
    JobHistoryParser parser = new JobHistoryParser(_fs, histFile);
    JobHistoryParser.JobInfo jobInfo = parser.parse();
    IOException parseException = parser.getParseException();
    if (parseException != null) {
        throw new RuntimeException("Could not parse history file " + histFile, parseException);
    }

    jobData.setSubmitTime(jobInfo.getSubmitTime());
    jobData.setStartTime(jobInfo.getLaunchTime());
    jobData.setFinishTime(jobInfo.getFinishTime());

    String state = jobInfo.getJobStatus();
    if (state.equals("SUCCEEDED")) {

        jobData.setSucceeded(true);

        // Fetch job counter
        MapReduceCounterData jobCounter = getCounterData(jobInfo.getTotalCounters());

        // Fetch task data
        Map<TaskID, JobHistoryParser.TaskInfo> allTasks = jobInfo.getAllTasks();
        List<JobHistoryParser.TaskInfo> mapperInfoList = new ArrayList<JobHistoryParser.TaskInfo>();
        List<JobHistoryParser.TaskInfo> reducerInfoList = new ArrayList<JobHistoryParser.TaskInfo>();
        for (JobHistoryParser.TaskInfo taskInfo : allTasks.values()) {
            if (taskInfo.getTaskType() == TaskType.MAP) {
                mapperInfoList.add(taskInfo);
            } else {
                reducerInfoList.add(taskInfo);
            }
        }
        if (jobInfo.getTotalMaps() > MAX_SAMPLE_SIZE) {
            logger.debug(jobId + " total mappers: " + mapperInfoList.size());
        }
        if (jobInfo.getTotalReduces() > MAX_SAMPLE_SIZE) {
            logger.debug(jobId + " total reducers: " + reducerInfoList.size());
        }
        MapReduceTaskData[] mapperList = getTaskData(jobId, mapperInfoList);
        MapReduceTaskData[] reducerList = getTaskData(jobId, reducerInfoList);

        jobData.setCounters(jobCounter).setMapperData(mapperList).setReducerData(reducerList);
    } else if (state.equals("FAILED")) {

        jobData.setSucceeded(false);
        jobData.setDiagnosticInfo(jobInfo.getErrorInfo());
    } else {
        // Should not reach here
        throw new RuntimeException("Job state not supported. Should be either SUCCEEDED or FAILED");
    }

    return jobData;
}

From source file: ezbake.helpers.cdh.Cdh2EzProperties.java

License: Apache License
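This example scans a zip archive of Hadoop configuration files, buffers each core-site.xml and hdfs-site.xml entry into memory, and adds it to the Configuration under the entry's name: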

public Configuration getConfiguration(InputStreamDataSource configStream) throws IOException {
    Configuration configuration = new Configuration(false);
    try (ZipArchiveInputStream zipInputStream = new ZipArchiveInputStream(configStream.getInputStream())) {
        ZipArchiveEntry zipEntry = zipInputStream.getNextZipEntry();
        while (zipEntry != null) {
            String name = zipEntry.getName();
            if (name.endsWith("core-site.xml") || name.endsWith("hdfs-site.xml")) {
                if (verbose)
                    System.err.println("Reading \"" + name + "\" into Configuration.");
                ByteArrayOutputStream boas = new ByteArrayOutputStream();
                IOUtils.copy(zipInputStream, boas);
                configuration.addResource(new ByteArrayInputStream(boas.toByteArray()), name);
            }
            zipEntry = zipInputStream.getNextZipEntry();
        }
    }
    return configuration;
}

From source file: org.apache.kylin.source.kafka.config.KafkaConsumerPropertiesTest.java

License: Apache License
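This test loads a Kafka consumer configuration file into a Hadoop Configuration and verifies that its settings can be read back, both directly and after extraction into java.util.Properties: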

@Test
public void testLoadKafkaPropertiesAsHadoopJobConf()
        throws IOException, ParserConfigurationException, SAXException {
    KafkaConsumerProperties kafkaConsumerProperties = KafkaConsumerProperties.getInstanceFromEnv();
    Configuration conf = new Configuration(false);
    conf.addResource(new FileInputStream(new File(kafkaConsumerProperties.getKafkaConsumerHadoopJobConf())),
            KafkaConsumerProperties.KAFKA_CONSUMER_FILE);
    assertEquals("30000", conf.get("session.timeout.ms"));

    Properties prop = KafkaConsumerProperties.extractKafkaConfigToProperties(conf);
    assertEquals("30000", prop.getProperty("session.timeout.ms"));
}

From source file: org.apache.metamodel.util.HdfsResource.java

License: Apache License
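A small helper that adds a named configuration file as a resource only if it exists in the given Hadoop configuration directory: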

private void addResourceIfExists(Configuration conf, File hadoopConfigurationDirectory, String filename) {
    final File file = new File(hadoopConfigurationDirectory, filename);
    if (file.exists()) {
        final InputStream inputStream = FileHelper.getInputStream(file);
        conf.addResource(inputStream, filename);
    }
}