Example usage for org.apache.commons.configuration SubsetConfiguration getKeys

List of usage examples for org.apache.commons.configuration SubsetConfiguration getKeys

Introduction

On this page you can find an example usage for org.apache.commons.configuration SubsetConfiguration getKeys.

Prototype

public Iterator getKeys() 

Source Link

Usage

From source file:org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSink.java

/**
 * Initializes the sink from the metrics2 subset configuration: resolves the
 * local host name, parses the collector address list, sizes the metrics cache,
 * and loads any per-context tag filters.
 */
@Override
public void init(SubsetConfiguration conf) {
    LOG.info("Initializing Timeline metrics sink.");

    // Take the hostname from the configuration if set; otherwise fall back to
    // DNS resolution using the datanode interface/nameserver settings.
    String configuredHostName = conf.getString("slave.host.name");
    if (configuredHostName != null) {
        hostName = configuredHostName;
    } else {
        try {
            hostName = DNS.getDefaultHost(conf.getString("dfs.datanode.dns.interface", "default"),
                    conf.getString("dfs.datanode.dns.nameserver", "default"));
        } catch (UnknownHostException uhe) {
            // Best effort: log and keep a recognizable placeholder so init can proceed.
            LOG.error(uhe);
            hostName = "UNKNOWN.example.com";
        }
    }

    serviceName = getServiceName(conf);

    LOG.info("Identified hostname = " + hostName + ", serviceName = " + serviceName);

    // Load collector configs; 6188 is the default collector port.
    metricsServers = Servers.parse(conf.getString(COLLECTOR_HOST_PROPERTY), 6188);

    if (metricsServers == null || metricsServers.isEmpty()) {
        LOG.error("No Metric collector configured.");
    } else {
        collectorUri = "http://" + conf.getString(COLLECTOR_HOST_PROPERTY).trim() + "/ws/v1/timeline/metrics";
    }

    LOG.info("Collector Uri: " + collectorUri);

    int maxRowCacheSize = conf.getInt(MAX_METRIC_ROW_CACHE_SIZE,
            TimelineMetricsCache.MAX_RECS_PER_NAME_DEFAULT);
    int metricsSendInterval = conf.getInt(METRICS_SEND_INTERVAL, TimelineMetricsCache.MAX_EVICTION_TIME_MILLIS); // ~ 1 min
    metricsCache = new TimelineMetricsCache(maxRowCacheSize, metricsSendInterval);

    // Scan the configuration for per-context tag filters of the form
    // "<TAGS_FOR_PREFIX_PROPERTY_PREFIX><context> = tag1,tag2,...".
    conf.setListDelimiter(',');
    @SuppressWarnings("unchecked") // commons-configuration pre-generics API returns a raw Iterator
    Iterator<String> it = (Iterator<String>) conf.getKeys();
    while (it.hasNext()) {
        String propertyName = it.next();
        if (propertyName != null && propertyName.startsWith(TAGS_FOR_PREFIX_PROPERTY_PREFIX)) {
            String contextName = propertyName.substring(TAGS_FOR_PREFIX_PROPERTY_PREFIX.length());
            // A null value in the map means "accept all tags" for that context.
            useTagsMap.put(contextName, parseTagFilter(conf.getStringArray(propertyName)));
        }
    }
}

/**
 * Builds the tag filter set for one context from the configured tag list.
 *
 * @param tags raw configured tags (entries may contain whitespace or the "*" wildcard)
 * @return the set of trimmed, non-empty tags, or {@code null} when no tags are
 *         configured or the "*" wildcard is present (meaning "use all tags")
 */
private static Set<String> parseTagFilter(String[] tags) {
    if (tags.length == 0) {
        return null;
    }
    Set<String> set = new HashSet<String>();
    for (String tag : tags) {
        tag = tag.trim();
        if (tag.equals("*")) {
            return null; // wildcard overrides any explicit tags
        }
        if (tag.length() > 0) {
            set.add(tag);
        }
    }
    return set;
}

From source file:org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSinkTest.java

/**
 * Verifies that putMetrics() sends a metric to the collector exactly once:
 * the second putMetrics() call within the send interval must be served from
 * the metrics cache rather than triggering another HTTP POST.
 */
@Test
public void testPutMetrics() throws Exception {
    HadoopTimelineMetricsSink sink = new HadoopTimelineMetricsSink();

    // Minimal sink configuration: fixed host name, one collector at localhost:63188.
    SubsetConfiguration conf = createNiceMock(SubsetConfiguration.class);
    expect(conf.getString(eq("slave.host.name"))).andReturn("testhost").anyTimes();
    expect(conf.getParent()).andReturn(null).anyTimes();
    expect(conf.getPrefix()).andReturn("service").anyTimes();
    expect(conf.getString(eq(COLLECTOR_HOST_PROPERTY))).andReturn("localhost:63188").anyTimes();
    expect(conf.getString(eq("serviceName-prefix"), eq(""))).andReturn("").anyTimes();

    expect(conf.getInt(eq(MAX_METRIC_ROW_CACHE_SIZE), anyInt())).andReturn(10).anyTimes();
    expect(conf.getInt(eq(METRICS_SEND_INTERVAL), anyInt())).andReturn(1000).anyTimes();

    conf.setListDelimiter(eq(','));
    expectLastCall().anyTimes();

    // No tag-filter properties are configured, so init() sees no keys at all.
    expect(conf.getKeys()).andReturn(java.util.Collections.emptyIterator()).once();

    HttpClient httpClient = createNiceMock(HttpClient.class);

    expect(httpClient.executeMethod(anyObject(PostMethod.class))).andReturn(200).once(); //metrics send only once due to caching

    AbstractMetric metric = createNiceMock(AbstractMetric.class);
    expect(metric.name()).andReturn("metricName").anyTimes();
    expect(metric.value()).andReturn(9.5687).anyTimes();
    //TODO currently only numeric metrics are supported

    MetricsRecord record = createNiceMock(MetricsRecord.class);
    expect(record.name()).andReturn("testName").anyTimes();
    expect(record.context()).andReturn("testContext").anyTimes();
    // Use the real clock so the two putMetrics() calls get distinct timestamps.
    expect(record.timestamp()).andAnswer(new IAnswer<Long>() {
        @Override
        public Long answer() throws Throwable {
            return System.currentTimeMillis();
        }
    }).anyTimes();

    expect(record.metrics()).andReturn(Arrays.asList(metric)).anyTimes();

    replay(conf, httpClient, record, metric);

    sink.setHttpClient(httpClient);
    sink.init(conf);

    sink.putMetrics(record);

    // Wait past the 1000 ms send interval so the second record is cached, not re-sent.
    Thread.sleep(1500L);

    sink.putMetrics(record);

    verify(conf, httpClient, record, metric);
}

From source file:org.apache.hadoop.metrics2.sink.timeline.HadoopTimelineMetricsSinkTest.java

/**
 * Verifies that records sharing a timestamp are de-duplicated by the metrics
 * cache: six putMetrics() calls (with two pairs of duplicate timestamps) must
 * result in exactly two emitted TimelineMetrics batches of two values each.
 *
 * NOTE(review): the .once() expectations on metric.value() and
 * record.timestamp() form an ordered script — EasyMock returns them in
 * declaration order, so the statement order below is load-bearing.
 */
@Test
public void testDuplicateTimeSeriesNotSaved() throws Exception {
    // Partial mock: appendPrefix/emitMetrics are stubbed, everything else is real.
    HadoopTimelineMetricsSink sink = createMockBuilder(HadoopTimelineMetricsSink.class).withConstructor()
            .addMockedMethod("appendPrefix").addMockedMethod("emitMetrics").createNiceMock();

    // Minimal sink configuration: fixed host name, one collector address.
    SubsetConfiguration conf = createNiceMock(SubsetConfiguration.class);
    expect(conf.getString(eq("slave.host.name"))).andReturn("testhost").anyTimes();
    expect(conf.getParent()).andReturn(null).anyTimes();
    expect(conf.getPrefix()).andReturn("service").anyTimes();
    expect(conf.getString(eq(COLLECTOR_HOST_PROPERTY))).andReturn("localhost:63188").anyTimes();
    expect(conf.getString(eq("serviceName-prefix"), eq(""))).andReturn("").anyTimes();

    expect(conf.getInt(eq(MAX_METRIC_ROW_CACHE_SIZE), anyInt())).andReturn(10).anyTimes();
    // Return eviction time smaller than time diff for first 3 entries
    // Third entry will result in eviction
    expect(conf.getInt(eq(METRICS_SEND_INTERVAL), anyInt())).andReturn(10).anyTimes();

    conf.setListDelimiter(eq(','));
    expectLastCall().anyTimes();

    // No tag-filter properties are configured: init() iterates over zero keys.
    expect(conf.getKeys()).andReturn(new Iterator() {
        @Override
        public boolean hasNext() {
            return false;
        }

        @Override
        public Object next() {
            return null;
        }

        @Override
        public void remove() {

        }
    }).once();

    // Six sequential values, one per putMetrics() call below (ordered script).
    AbstractMetric metric = createNiceMock(AbstractMetric.class);
    expect(metric.name()).andReturn("metricName").anyTimes();
    expect(metric.value()).andReturn(1.0).once();
    expect(metric.value()).andReturn(2.0).once();
    expect(metric.value()).andReturn(3.0).once();
    expect(metric.value()).andReturn(4.0).once();
    expect(metric.value()).andReturn(5.0).once();
    expect(metric.value()).andReturn(6.0).once();

    MetricsRecord record = createNiceMock(MetricsRecord.class);
    expect(record.name()).andReturn("testName").anyTimes();
    expect(record.context()).andReturn("testContext").anyTimes();

    // appendPrefix is stubbed to a no-op so record names pass through unchanged.
    sink.appendPrefix(eq(record), (StringBuilder) anyObject());
    expectLastCall().anyTimes().andStubAnswer(new IAnswer<Object>() {
        @Override
        public Object answer() throws Throwable {
            return null;
        }
    });

    // Timestamps t1,t1,t2,t2,t3,t4: duplicates at t1 and t2 must be collapsed.
    final Long now = System.currentTimeMillis();
    // TODO: Current implementation of cache needs > 1 elements to evict any
    expect(record.timestamp()).andReturn(now).times(2);
    expect(record.timestamp()).andReturn(now + 100l).times(2);
    expect(record.timestamp()).andReturn(now + 200l).once();
    expect(record.timestamp()).andReturn(now + 300l).once();

    expect(record.metrics()).andReturn(Arrays.asList(metric)).anyTimes();

    // Capture each emitted batch instead of performing real HTTP calls.
    final List<TimelineMetrics> capturedMetrics = new ArrayList<TimelineMetrics>();
    sink.emitMetrics((TimelineMetrics) anyObject());
    expectLastCall().andStubAnswer(new IAnswer<Object>() {
        @Override
        public Object answer() throws Throwable {
            capturedMetrics.add((TimelineMetrics) EasyMock.getCurrentArguments()[0]);
            return null;
        }
    });

    replay(conf, sink, record, metric);

    sink.init(conf);

    // time = t1
    sink.putMetrics(record);
    // time = t1
    sink.putMetrics(record);
    // time = t2
    sink.putMetrics(record);
    // Evict
    // time = t2
    sink.putMetrics(record);
    // time = t3
    sink.putMetrics(record);
    // time = t4
    sink.putMetrics(record);

    verify(conf, sink, record, metric);

    // Two batches emitted; duplicate-timestamp values (2.0 at t1, 4.0 at t2) were dropped.
    Assert.assertEquals(2, capturedMetrics.size());
    Iterator<TimelineMetrics> metricsIterator = capturedMetrics.iterator();

    // t1, t2
    TimelineMetric timelineMetric1 = metricsIterator.next().getMetrics().get(0);
    Assert.assertEquals(2, timelineMetric1.getMetricValues().size());
    Iterator<Long> timestamps = timelineMetric1.getMetricValues().keySet().iterator();
    Assert.assertEquals(now, timestamps.next());
    Assert.assertEquals(new Long(now + 100l), timestamps.next());
    Iterator<Double> values = timelineMetric1.getMetricValues().values().iterator();
    Assert.assertEquals(new Double(1.0), values.next());
    Assert.assertEquals(new Double(3.0), values.next());
    // t3, t4
    TimelineMetric timelineMetric2 = metricsIterator.next().getMetrics().get(0);
    Assert.assertEquals(2, timelineMetric2.getMetricValues().size());
    timestamps = timelineMetric2.getMetricValues().keySet().iterator();
    Assert.assertEquals(new Long(now + 200l), timestamps.next());
    Assert.assertEquals(new Long(now + 300l), timestamps.next());
    values = timelineMetric2.getMetricValues().values().iterator();
    Assert.assertEquals(new Double(5.0), values.next());
    Assert.assertEquals(new Double(6.0), values.next());
}

From source file:org.apache.hadoop.metrics2.sink.timeline.TimelineMetricsSink.java

/**
 * Initializes the sink: delegates common setup to the superclass, sizes the
 * metrics cache from configuration, and loads any per-context tag filters.
 */
@Override
public void init(SubsetConfiguration conf) {
    super.init(conf);

    int maxRowCacheSize = conf.getInt(MAX_METRIC_ROW_CACHE_SIZE,
            TimelineMetricsCache.MAX_RECS_PER_NAME_DEFAULT);
    int metricsSendInterval = conf.getInt(METRICS_SEND_INTERVAL, TimelineMetricsCache.MAX_EVICTION_TIME_MILLIS); // ~ 1 min
    metricsCache = new TimelineMetricsCache(maxRowCacheSize, metricsSendInterval);

    // Scan the configuration for per-context tag filters of the form
    // "<TAGS_FOR_PREFIX_PROPERTY_PREFIX><context> = tag1,tag2,...".
    conf.setListDelimiter(',');
    @SuppressWarnings("unchecked") // commons-configuration pre-generics API returns a raw Iterator
    Iterator<String> it = (Iterator<String>) conf.getKeys();
    while (it.hasNext()) {
        String propertyName = it.next();
        if (propertyName != null && propertyName.startsWith(TAGS_FOR_PREFIX_PROPERTY_PREFIX)) {
            String contextName = propertyName.substring(TAGS_FOR_PREFIX_PROPERTY_PREFIX.length());
            // A null value in the map means "accept all tags" for that context.
            useTagsMap.put(contextName, parseTagFilter(conf.getStringArray(propertyName)));
        }
    }
}

/**
 * Builds the tag filter set for one context from the configured tag list.
 *
 * @param tags raw configured tags (entries may contain whitespace or the "*" wildcard)
 * @return the set of trimmed, non-empty tags, or {@code null} when no tags are
 *         configured or the "*" wildcard is present (meaning "use all tags")
 */
private static Set<String> parseTagFilter(String[] tags) {
    if (tags.length == 0) {
        return null;
    }
    Set<String> set = new HashSet<String>();
    for (String tag : tags) {
        tag = tag.trim();
        if (tag.equals("*")) {
            return null; // wildcard overrides any explicit tags
        }
        if (tag.length() > 0) {
            set.add(tag);
        }
    }
    return set;
}