Example usage for org.apache.commons.configuration SubsetConfiguration getString

List of usage examples for org.apache.commons.configuration SubsetConfiguration getString

Introduction

On this page you can find an example usage for org.apache.commons.configuration SubsetConfiguration getString.

Prototype

public String getString(String key) 

Source Link

Usage

From source file:com.zavakid.mushroom.filter.AbstractPatternFilter.java

@Override
public void init(SubsetConfiguration conf) {
    // Compile the optional include/exclude name patterns from configuration.
    String patternString = conf.getString(INCLUDE_KEY);
    if (patternString != null && !patternString.isEmpty()) {
        LOG.debug("got include pattern: " + patternString);
        setIncludePattern(compile(patternString));
    }
    patternString = conf.getString(EXCLUDE_KEY);
    if (patternString != null && !patternString.isEmpty()) {
        // Fixed copy-paste bug: this branch previously logged "include".
        LOG.debug("got exclude pattern: " + patternString);
        setExcludePattern(compile(patternString));
    }
    // Tag patterns have the form "<tagName>:<valueRegex>", validated by tagPattern.
    String[] patternStrings = conf.getStringArray(INCLUDE_TAGS_KEY);
    if (patternStrings != null && patternStrings.length != 0) {
        // String.join renders the array contents; "" + array would print its identity hash.
        LOG.debug("got include tags pattern: " + String.join(", ", patternStrings));
        for (String pstr : patternStrings) {
            Matcher matcher = tagPattern.matcher(pstr);
            if (!matcher.matches()) {
                throw new MetricsException("Illegal tag pattern: " + pstr);
            }
            setIncludeTagPattern(matcher.group(1), compile(matcher.group(2)));
        }
    }
    patternStrings = conf.getStringArray(EXCLUDE_TAGS_KEY);
    if (patternStrings != null && patternStrings.length != 0) {
        LOG.debug("got exclude tags pattern: " + String.join(", ", patternStrings));
        for (String pstr : patternStrings) {
            Matcher matcher = tagPattern.matcher(pstr);
            if (!matcher.matches()) {
                throw new MetricsException("Illegal tag pattern: " + pstr);
            }
            setExcludeTagPattern(matcher.group(1), compile(matcher.group(2)));
        }
    }
}

From source file:org.apache.hadoop.chukwa.inputtools.log4j.Log4jMetricsSink.java

@Override
public void init(SubsetConfiguration conf) {
    // Allow configuration to override the default host, port, and context.
    String configuredHost = conf.getString(HOST_PROPERTY);
    if (configuredHost != null) {
        this.host = configuredHost;
    }
    String configuredPort = conf.getString(PORT_PROPERTY);
    if (configuredPort != null) {
        this.port = Integer.parseInt(configuredPort);
    }
    String configuredContext = conf.getString(CONTEXT);
    if (configuredContext != null) {
        this.context = configuredContext;
    }

    // Wire a socket appender onto a dedicated, non-additive logger so that
    // metrics lines go only to the remote collector, not to parent appenders.
    String loggerName = "chukwa.metrics." + this.context;

    org.apache.log4j.net.SocketAppender appender = new org.apache.log4j.net.SocketAppender(this.host,
            this.port);
    appender.setName(loggerName);
    appender.setLayout(new PatternLayout("%d{ISO8601} %p %c: %m%n"));

    Logger logger = Logger.getLogger(loggerName);
    logger.setAdditivity(false);
    logger.addAppender(appender);
    appender.activateOptions();
    out = logger;
}

From source file:org.apache.hadoop.metrics2.cloudwatch.CloudWatchSink.java

@Override
public void init(SubsetConfiguration conf) {
    // AWS credentials and CloudWatch settings read from the sink configuration.
    String accessKey = conf.getString("accesskey");
    String secretKey = conf.getString("secretkey");
    String region = conf.getString("region");
    String namespace = conf.getString("namespace");
    String batch = conf.getString("batch");

    try {
        // Defaults: "Custom" namespace, batch size 5.
        _namespace = (null == namespace ? "Custom" : namespace);
        _batch = (null == batch ? 5 : Integer.valueOf(batch));

        AWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);
        _client = new AmazonCloudWatchClient(awsCredentials);
        _client.setRegion(Region.getRegion(Regions.fromName(region)));
    } catch (Exception e) {
        // Chain the original exception; previously only its message survived,
        // losing the stack trace of the real failure.
        throw new MetricsException(e.getMessage(), e);
    }
}

From source file:org.apache.hadoop.metrics2.impl.TestKafkaMetrics.java

@Test
@SuppressWarnings({ "unchecked", "rawtypes" })
public void testPutMetrics() throws Exception {
    // Create a record by mocking MetricsRecord class.
    MetricsRecord record = mock(MetricsRecord.class);
    when(record.tags()).thenReturn(Lists.newArrayList(new MetricsTag(KafkaMetricsInfo.KafkaTag, "test_tag")));
    when(record.timestamp()).thenReturn(System.currentTimeMillis());

    // Create a metric using AbstractMetric class.
    AbstractMetric metric = new AbstractMetric(KafkaMetricsInfo.KafkaCounter) {
        @Override
        public Number value() {
            // valueOf instead of the deprecated Integer(int) boxing constructor.
            return Integer.valueOf(123);
        }

        @Override
        public MetricType type() {
            return null;
        }

        @Override
        public void visit(MetricsVisitor visitor) {

        }
    };

    // Create a list of metrics.
    Iterable<AbstractMetric> metrics = Lists.newArrayList(metric);
    when(record.name()).thenReturn("Kafka record name");
    when(record.metrics()).thenReturn(metrics);
    SubsetConfiguration conf = mock(SubsetConfiguration.class);
    when(conf.getString(KafkaSink.BROKER_LIST)).thenReturn("localhost:9092");
    String topic = "myTestKafkaTopic";
    when(conf.getString(KafkaSink.TOPIC)).thenReturn(topic);

    // Create the KafkaSink object and initialize it.
    kafkaSink = new KafkaSink();
    kafkaSink.init(conf);

    // Create a mock KafkaProducer as a producer for KafkaSink.
    Producer<Integer, byte[]> mockProducer = mock(KafkaProducer.class);
    kafkaSink.setProducer(mockProducer);

    // Create the json object from the record.
    StringBuilder jsonLines = recordToJson(record);
    if (LOG.isDebugEnabled()) {
        LOG.debug("kafka message: " + jsonLines.toString());
    }

    // Send the record and store the result in a mock Future.
    Future<RecordMetadata> f = mock(Future.class);
    when(mockProducer.send((ProducerRecord) anyObject())).thenReturn(f);
    kafkaSink.putMetrics(record);

    // Get the argument and verify it.
    ArgumentCaptor<ProducerRecord> argument = ArgumentCaptor.forClass(ProducerRecord.class);
    verify(mockProducer).send(argument.capture());

    // Compare the received data with the original one.
    ProducerRecord<Integer, byte[]> data = (argument.getValue());
    String jsonResult = new String(data.value());
    if (LOG.isDebugEnabled()) {
        LOG.debug("kafka result: " + jsonResult);
    }
    assertEquals(jsonLines.toString(), jsonResult);
}

From source file:org.apache.hadoop.metrics2.ovis.OvisMetricNameSink.java

@Override
public void init(SubsetConfiguration conf) {
    // Destination for metric names: the configured file, or stdout when unset.
    filename = conf.getString("filename");
    try {
        if (filename == null) {
            writer = new PrintWriter(new BufferedOutputStream(System.out));
        } else {
            writer = new PrintWriter(new FileWriter(new File(filename)));
        }
    } catch (Exception e) {
        // Chain the cause (was message-only), matching FileSink's style and
        // keeping the underlying I/O stack trace available to callers.
        throw new MetricsException(
                "OvisMetricNameSink: " + "Error in creating file " + filename + ": " + e.getMessage(), e);
    }
}

From source file:org.apache.hadoop.metrics2.sink.FileSink.java

@Override
public void init(SubsetConfiguration conf) {
    // Append to the configured file; without a filename, write to stdout.
    String filename = conf.getString(FILENAME_KEY);
    try {
        if (filename == null) {
            writer = new PrintWriter(new BufferedOutputStream(System.out));
        } else {
            writer = new PrintWriter(new FileWriter(new File(filename), true));
        }
    } catch (Exception e) {
        throw new MetricsException("Error creating " + filename, e);
    }
}

From source file:org.apache.hadoop.metrics2.sink.FileSinkHashMap.java

@Override
public void init(SubsetConfiguration conf) {
    // Open the sink's output: append to the named file, or fall back to stdout.
    String filename = conf.getString(FILENAME_KEY);
    try {
        if (filename == null) {
            writer = new PrintWriter(System.out);
        } else {
            writer = new PrintWriter(new FileWriter(new File(filename), true));
        }
    } catch (Exception e) {
        throw new MetricsException("Error creating " + filename, e);
    }
}

From source file:org.apache.hadoop.metrics2.sink.ganglia.AbstractGangliaSink.java

public void init(SubsetConfiguration conf) {
    LOG.debug("Initializing the GangliaSink for Ganglia metrics.");

    this.conf = conf;

    // Prefer an explicitly configured hostname; otherwise resolve it through
    // the DNS helper. Hoisted into a local to avoid a duplicate config lookup.
    String configuredHostName = conf.getString("slave.host.name");
    if (configuredHostName != null) {
        hostName = configuredHostName;
    } else {
        try {
            hostName = DNS.getDefaultHost(conf.getString("dfs.datanode.dns.interface", "default"),
                    conf.getString("dfs.datanode.dns.nameserver", "default"));
        } catch (UnknownHostException uhe) {
            LOG.error(uhe);
            hostName = "UNKNOWN.example.com";
        }
    }

    // load the ganglia servers from properties
    metricsServers = Servers.parse(conf.getString(SERVERS_PROPERTY), DEFAULT_PORT);

    // extract the Ganglia conf per metrics
    gangliaConfMap = new HashMap<String, GangliaConf>();
    loadGangliaConf(GangliaConfType.units);
    loadGangliaConf(GangliaConfType.tmax);
    loadGangliaConf(GangliaConfType.dmax);
    loadGangliaConf(GangliaConfType.slope);

    try {
        datagramSocket = new DatagramSocket();
    } catch (SocketException se) {
        // Best-effort: the sink is left without a socket; sends will fail later.
        LOG.error(se);
    }

    // see if sparseMetrics is supported. Default is false
    supportSparseMetrics = conf.getBoolean(SUPPORT_SPARSE_METRICS_PROPERTY, SUPPORT_SPARSE_METRICS_DEFAULT);
}

From source file:org.apache.hadoop.metrics2.sink.GraphiteSink.java

@Override
public void init(SubsetConfiguration conf) {
    // Read the Graphite server endpoint from configuration.
    String serverHost = conf.getString(SERVER_HOST_KEY);
    Integer serverPort = Integer.parseInt(conf.getString(SERVER_PORT_KEY));

    // Optional prefix prepended to every metric name; empty string when unset.
    String configuredPrefix = conf.getString(METRICS_PREFIX);
    metricsPrefix = (configuredPrefix == null) ? "" : configuredPrefix;

    try {
        // Connect to the Graphite server and hand the stream to the writer.
        Socket socket = new Socket(serverHost, serverPort);
        setWriter(new OutputStreamWriter(socket.getOutputStream()));
    } catch (Exception e) {
        throw new MetricsException("Error creating connection, " + serverHost + ":" + serverPort, e);
    }
}

From source file:org.apache.hadoop.metrics2.sink.KafkaSink.java

@Override
public void init(SubsetConfiguration conf) {
    // Get Kafka broker configuration.
    Properties props = new Properties();
    brokerList = conf.getString(BROKER_LIST);
    if (LOG.isDebugEnabled()) {
        // Single debug line; the broker list was previously logged twice.
        LOG.debug("Kafka brokers: " + brokerList);
    }
    // Fail fast with a clear message; Properties.put(key, null) would otherwise
    // throw a bare NPE. Mirrors the existing topic validation below.
    if (Strings.isNullOrEmpty(brokerList)) {
        throw new MetricsException("Kafka broker list can not be null");
    }
    props.put("bootstrap.servers", brokerList);

    // Get Kafka topic configuration.
    topic = conf.getString(TOPIC);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Kafka topic " + topic);
    }
    if (Strings.isNullOrEmpty(topic)) {
        throw new MetricsException("Kafka topic can not be null");
    }

    // Set the rest of Kafka configuration.
    props.put("key.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer");
    props.put("request.required.acks", "0");

    // Set the hostname once and use it in every message.
    hostname = "null";
    try {
        hostname = InetAddress.getLocalHost().getHostName();
    } catch (Exception e) {
        // Best-effort: keep the "null" placeholder rather than aborting init.
        LOG.warn("Error getting Hostname, going to continue");
    }

    try {
        // Create the producer object.
        producer = new KafkaProducer<Integer, byte[]>(props);
    } catch (Exception e) {
        throw new MetricsException("Error creating Producer, " + brokerList, e);
    }
}