Example usage for org.apache.commons.configuration Configuration getInt

List of usage examples for org.apache.commons.configuration Configuration getInt

Introduction

On this page you can find example usage for org.apache.commons.configuration Configuration getInt.

Prototype

int getInt(String key, int defaultValue);

Document

Get an int associated with the given configuration key; if the key does not exist, the supplied default value is returned.
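
A minimal, self-contained sketch of the two-argument overload is shown below; it uses an in-memory PropertiesConfiguration, and the property names are illustrative only (they do not come from the projects listed under Usage).

import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.PropertiesConfiguration;

public class GetIntExample {
    public static void main(String[] args) {
        // Build an in-memory configuration; no properties file is needed for this sketch.
        Configuration config = new PropertiesConfiguration();
        config.addProperty("service.timeout", "30000");

        // Key present: the stored value is parsed and returned (30000).
        int timeout = config.getInt("service.timeout", 60000);

        // Key absent: the supplied default is returned instead (3).
        int retries = config.getInt("service.retries", 3);

        System.out.println("timeout=" + timeout + ", retries=" + retries);
    }
}

The single-argument getInt(String) throws a NoSuchElementException when the key is missing, so the two-argument form is the usual choice for optional settings such as timeouts and retry counts, as the examples below illustrate.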

Usage

From source file:org.ambraproject.service.search.SolrSearchService.java

public void setConfiguration(Configuration config) throws ApplicationException {
    queryTimeout = config.getInt("ambra.services.search.timeout", 60000); // default to 1 min

    List sizes = config.getList("ambra.services.search.pageSizes.size");

    if (sizes == null) {
        throw new ApplicationException("ambra.services.search.pageSizes not defined " + "in configuration.");
    }

    pageSizes = sizes;

    if (config.containsKey("ambra.services.search.sortOptions.option")) {
        validSorts = new HashMap();
        displaySorts = new ArrayList();

        HierarchicalConfiguration hc = (HierarchicalConfiguration) config;
        List<HierarchicalConfiguration> sorts = hc.configurationsAt("ambra.services.search.sortOptions.option");

        for (HierarchicalConfiguration s : sorts) {
            String key = s.getString("[@displayName]");
            String value = s.getString("");
            validSorts.put(key, value);
            displaySorts.add(key);
        }

        ((HierarchicalConfiguration) config).setExpressionEngine(null);
    } else {
        throw new ApplicationException(
                "ambra.services.search.sortOptions.option not defined " + "in configuration.");
    }

    if (config.containsKey("ambra.services.search.keywordFields.field")) {
        validKeywords = new HashMap();
        HierarchicalConfiguration hc = (HierarchicalConfiguration) config;
        List<HierarchicalConfiguration> sorts = hc
                .configurationsAt("ambra.services.search.keywordFields.field");

        for (HierarchicalConfiguration s : sorts) {
            String key = s.getString("[@displayName]");
            String value = s.getString("");
            validKeywords.put(key, value);

        }
    } else {
        throw new ApplicationException(
                "ambra.services.search.keywordFields.field not defined " + "in configuration.");
    }
}

From source file:org.ambraproject.struts2.AmbraFreemarkerConfig.java

/**
 * Constructor that loads the list of css and javascript files and page titles for pages which
 * follow the standard templates.  Creates its own composite configuration by iterating over each
 * of the configs in the config to assemble a union of pages defined.
 * @param configuration Ambra configuration
 * @throws Exception if the cisStartDate entry is missing from the configuration or cannot be parsed
 *
 */
public AmbraFreemarkerConfig(Configuration configuration) throws Exception {
    if (log.isDebugEnabled()) {
        log.debug("Creating FreeMarker configuration");
    }
    dojoDebug = configuration.getBoolean("struts.devMode");
    dirPrefix = configuration.getString("ambra.platform.appContext");
    subdirPrefix = configuration.getString("ambra.platform.resourceSubDir");
    host = configuration.getString("ambra.platform.host");
    casLoginURL = configuration.getString("ambra.services.cas.url.login");
    casLogoutURL = configuration.getString("ambra.services.cas.url.logout");
    registrationURL = configuration.getString("ambra.services.registration.url.registration");
    changePasswordURL = configuration.getString("ambra.services.registration.url.change-password");
    changeEmailURL = configuration.getString("ambra.services.registration.url.change-email");
    doiResolverURL = configuration.getString("ambra.services.crossref.plos.doiurl");
    pubGetURL = configuration.getString("ambra.services.pubget.url");
    defaultJournalName = configuration.getString(DEFAULT_JOURNAL_NAME_CONFIG_KEY);
    journals = new HashMap<String, JournalConfig>();
    journalsByIssn = new HashMap<String, JournalConfig>();
    orgName = configuration.getString("ambra.platform.name");
    feedbackEmail = configuration.getString("ambra.platform.email.feedback");
    cache_storage_strong = configuration.getInt("ambra.platform.template_cache.strong",
            DEFAULT_TEMPLATE_CACHE_STRONG);
    cache_storage_soft = configuration.getInt("ambra.platform.template_cache.soft",
            DEFAULT_TEMPLATE_CACHE_SOFT);
    templateUpdateDelay = configuration.getInt("ambra.platform.template_cache.update_delay",
            DEFAULT_TEMPLATE_UPDATE_DELAY);
    String date = configuration.getString("ambra.platform.cisStartDate");
    freemarkerProperties = configuration.subset("ambra.platform.freemarker");

    if (date == null) {
        throw new Exception("Could not find the cisStartDate node in the "
                + "ambra platform configuration.  Make sure the " + "ambra/platform/cisStartDate node exists.");
    }

    try {
        cisStartDate = DateFormat.getDateInstance(DateFormat.SHORT).parse(date);
    } catch (ParseException ex) {
        throw new Exception("Could not parse the cisStartDate value of \"" + date
                + "\" in the ambra platform configuration.  Make sure the cisStartDate is in the "
                + "following format: dd/mm/yyyy", ex);
    }

    loadConfig(configuration);

    processVirtualJournalConfig(configuration);

    // Now that the "journals" Map exists, index that map by ISSN to populate "journalsByIssn".
    if (journals.entrySet() != null && journals.entrySet().size() > 0) {
        for (Entry<String, JournalConfig> e : journals.entrySet()) {
            JournalConfig j = e.getValue();
            journalsByIssn.put(j.getIssn(), j);
        }
    }

    if (log.isTraceEnabled()) {
        for (Entry<String, JournalConfig> e : journals.entrySet()) {
            JournalConfig j = e.getValue();
            log.trace("Journal: " + e.getKey());
            log.trace("Journal url: " + j.getUrl());
            log.trace("Default Title: " + j.getDefaultTitle());
            log.trace("Default CSS: " + printArray(j.getDefaultCss()));
            log.trace("Default JavaScript: " + printArray(j.getDefaultJavaScript()));
            Map<String, String[]> map = j.getCssFiles();
            for (Entry<String, String[]> entry : map.entrySet()) {
                log.trace("PageName: " + entry.getKey());
                log.trace("CSS FILES: " + printArray(entry.getValue()));
            }
            map = j.getJavaScriptFiles();
            for (Entry<String, String[]> entry : map.entrySet()) {
                log.trace("PageName: " + entry.getKey());
                log.trace("JS FILES: " + printArray(entry.getValue()));
            }

            for (Entry<String, String> entry : j.getTitles().entrySet()) {
                log.trace("PageName: " + entry.getKey());
                log.trace("Title: " + entry.getValue());
            }
        }
        log.trace("Dir Prefix: " + dirPrefix);
        log.trace("SubDir Prefix: " + subdirPrefix);
        log.trace("Host: " + host);
        log.trace("Cas url login: " + casLoginURL);
        log.trace("Cas url logout: " + casLogoutURL);
        log.trace("Registration URL: " + registrationURL);
        log.trace("Registration Change Pass URL: " + changePasswordURL);
        log.trace("Registration Change EMail URL: " + changeEmailURL);
        log.trace("DOI Resolver URL: " + doiResolverURL);
        log.trace("PubGet URL: " + pubGetURL);
        log.trace("Default Journal Name: " + defaultJournalName);
    }
    if (log.isDebugEnabled()) {
        log.debug("End FreeMarker Configuration Reading");
    }
}

From source file:org.apache.atlas.Atlas.java

private static int getPortValue(Configuration configuration, String enableTLSFlag) {
    int appPort;

    assert configuration != null;
    appPort = StringUtils.isEmpty(enableTLSFlag) || enableTLSFlag.equals("true")
            ? configuration.getInt(ATLAS_SERVER_HTTPS_PORT, 21443)
            : configuration.getInt(ATLAS_SERVER_HTTP_PORT, 21000);
    return appPort;
}

From source file:org.apache.atlas.AtlasBaseClient.java

@VisibleForTesting
protected Client getClient(Configuration configuration, UserGroupInformation ugi, String doAsUser) {
    DefaultClientConfig config = new DefaultClientConfig();
    // Enable POJO mapping feature
    config.getFeatures().put(JSONConfiguration.FEATURE_POJO_MAPPING, Boolean.TRUE);
    int readTimeout = configuration.getInt("atlas.client.readTimeoutMSecs", 60000);
    int connectTimeout = configuration.getInt("atlas.client.connectTimeoutMSecs", 60000);
    if (configuration.getBoolean(TLS_ENABLED, false)) {
        // create an SSL properties configuration if one doesn't exist. SSLFactory expects a file, so we are
        // forced to create a configuration object, persist it, then subsequently pass in an empty
        // configuration to SSLFactory
        try {
            SecureClientUtils.persistSSLClientConfiguration(configuration);
        } catch (Exception e) {
            LOG.info("Error processing client configuration.", e);
        }
    }

    final URLConnectionClientHandler handler;

    if ((!AuthenticationUtil.isKerberosAuthenticationEnabled()) && basicAuthUser != null
            && basicAuthPassword != null) {
        if (configuration.getBoolean(TLS_ENABLED, false)) {
            handler = SecureClientUtils.getUrlConnectionClientHandler();
        } else {
            handler = new URLConnectionClientHandler();
        }
    } else {
        handler = SecureClientUtils.getClientConnectionHandler(config, configuration, doAsUser, ugi);
    }
    Client client = new Client(handler, config);
    client.setReadTimeout(readTimeout);
    client.setConnectTimeout(connectTimeout);
    return client;
}

From source file:org.apache.atlas.ha.HAConfiguration.java

public static ZookeeperProperties getZookeeperProperties(Configuration configuration) {
    String zookeeperConnectString = configuration.getString("atlas.kafka." + ZOOKEEPER_PREFIX + "connect");
    if (configuration.containsKey(HA_ZOOKEEPER_CONNECT)) {
        zookeeperConnectString = configuration.getString(HA_ZOOKEEPER_CONNECT);
    }

    String zkRoot = configuration.getString(ATLAS_SERVER_HA_ZK_ROOT_KEY, ATLAS_SERVER_ZK_ROOT_DEFAULT);
    int retriesSleepTimeMillis = configuration.getInt(HA_ZOOKEEPER_RETRY_SLEEPTIME_MILLIS,
            DEFAULT_ZOOKEEPER_CONNECT_SLEEPTIME_MILLIS);

    int numRetries = configuration.getInt(HA_ZOOKEEPER_NUM_RETRIES, DEFAULT_ZOOKEEPER_CONNECT_NUM_RETRIES);

    int sessionTimeout = configuration.getInt(HA_ZOOKEEPER_SESSION_TIMEOUT_MS,
            DEFAULT_ZOOKEEPER_SESSION_TIMEOUT_MILLIS);

    String acl = configuration.getString(HA_ZOOKEEPER_ACL);
    String auth = configuration.getString(HA_ZOOKEEPER_AUTH);
    return new ZookeeperProperties(zookeeperConnectString, zkRoot, retriesSleepTimeMillis, numRetries,
            sessionTimeout, acl, auth);
}

From source file:org.apache.atlas.hook.AtlasTopicCreator.java

@VisibleForTesting
protected void createTopic(Configuration atlasProperties, String topicName, ZkUtils zkUtils) {
    int numPartitions = atlasProperties.getInt("atlas.notification.hook.numthreads", 1);
    int numReplicas = atlasProperties.getInt("atlas.notification.replicas", 1);
    AdminUtils.createTopic(zkUtils, topicName, numPartitions, numReplicas, new Properties(),
            RackAwareMode.Enforced$.MODULE$);
    LOG.warn("Created topic {} with partitions {} and replicas {}", topicName, numPartitions, numReplicas);
}

From source file:org.apache.atlas.hook.AtlasTopicCreator.java

@VisibleForTesting
protected ZkUtils createZkUtils(Configuration atlasProperties) {
    String zkConnect = atlasProperties.getString("atlas.kafka.zookeeper.connect");
    int sessionTimeout = atlasProperties.getInt("atlas.kafka.zookeeper.session.timeout.ms", 400);
    int connectionTimeout = atlasProperties.getInt("atlas.kafka.zookeeper.connection.timeout.ms", 200);
    Tuple2<ZkClient, ZkConnection> zkClientAndConnection = ZkUtils.createZkClientAndConnection(zkConnect,
            sessionTimeout, connectionTimeout);
    return new ZkUtils(zkClientAndConnection._1(), zkClientAndConnection._2(), false);
}

From source file:org.apache.atlas.services.MetricsService.java

@VisibleForTesting
MetricsService(Configuration configuration, AtlasGraph graph) {
    MetricsService.configuration = configuration;

    atlasGraph = graph;
    cacheTTLInSecs = configuration != null
            ? configuration.getInt(METRIC_QUERY_CACHE_TTL, DEFAULT_CACHE_TTL_IN_SECS)
            : DEFAULT_CACHE_TTL_IN_SECS;
    gremlinQueryProvider = AtlasGremlinQueryProvider.INSTANCE;
}

From source file:org.apache.atlas.sqoop.hook.SqoopHook.java

@Override
public void publish(SqoopJobDataPublisher.Data data) throws AtlasHookException {
    try {
        Configuration atlasProperties = ApplicationProperties.get();
        String clusterName = atlasProperties.getString(ATLAS_CLUSTER_NAME, DEFAULT_CLUSTER_NAME);

        Referenceable dbStoreRef = createDBStoreInstance(data);
        Referenceable dbRef = createHiveDatabaseInstance(clusterName, data.getHiveDB());
        Referenceable hiveTableRef = createHiveTableInstance(clusterName, dbRef, data.getHiveTable(),
                data.getHiveDB());
        Referenceable procRef = createSqoopProcessInstance(dbStoreRef, hiveTableRef, data, clusterName);

        int maxRetries = atlasProperties.getInt(HOOK_NUM_RETRIES, 3);
        HookNotification.HookNotificationMessage message = new HookNotification.EntityCreateRequest(
                AtlasHook.getUser(), dbStoreRef, dbRef, hiveTableRef, procRef);
        AtlasHook.notifyEntities(Arrays.asList(message), maxRetries);
    } catch (Exception e) {
        throw new AtlasHookException("SqoopHook.publish() failed.", e);
    }
}

From source file:org.apache.atlas.web.service.EmbeddedServer.java

protected Integer getBufferSize() {
    try {
        Configuration configuration = ApplicationProperties.get();
        return configuration.getInt("atlas.jetty.request.buffer.size", DEFAULT_BUFFER_SIZE);
    } catch (Exception e) {
        // fall back to the default buffer size below
    }

    return DEFAULT_BUFFER_SIZE;
}