Example usage for org.apache.commons.logging Log warn

List of usage examples for org.apache.commons.logging Log warn

Introduction

On this page you can find example usage for org.apache.commons.logging Log warn.

Prototype

void warn(Object message);

Document

Logs a message with warn log level.
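
A minimal sketch of the typical pattern, assuming nothing beyond the commons-logging API: obtain a Log from LogFactory, guard the call with isWarnEnabled() so the message is only built when warn output is enabled, and pass the message to warn(Object). The GreetingService class and its greet method are hypothetical names used only for illustration.

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class GreetingService {

    // One Log instance per class is the usual commons-logging convention.
    private static final Log LOG = LogFactory.getLog(GreetingService.class);

    public String greet(String name) {
        if (name == null || name.isEmpty()) {
            // The isWarnEnabled() guard skips the string concatenation
            // when the warn level is disabled.
            if (LOG.isWarnEnabled()) {
                LOG.warn("greet called without a name; falling back to default");
            }
            name = "world";
        }
        return "Hello, " + name;
    }
}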

Usage

From source file:org.eclipse.smila.search.datadictionary.DataDictionaryAccess.java

/**
 * DataDictionaryAccess.
 *
 * @return an access object.
 */
public static DataDictionaryAccess getInstance() {

    final Log log = LogFactory.getLog(DataDictionaryAccess.class);

    DataDictionaryAccess[] types;
    try {
        types = getTypes();
        if (types.length != 1) {
            if (log.isWarnEnabled()) {
                log.warn("invalid data dictionary access count [" + types.length + "]");
            }
            return null;
        }
        return types[0];
    } catch (final DataDictionaryException e) {
        if (log.isErrorEnabled()) {
            log.error(e);
        }
        return null;
    }
}

From source file:org.eclipse.smila.search.plugin.PluginFactory.java

public static Plugin getInstance() {

    // TODO: implement correctly
    final Log log = LogFactory.getLog(PluginFactory.class);
    Plugin[] types;
    try {
        types = getTypes();
        if (types.length != 1) {
            if (log.isWarnEnabled()) {
                log.warn("invalid plugin count [" + types.length + "]");
            }
            return null;
        }
        return types[0];
    } catch (final PluginException e) {
        if (log.isErrorEnabled()) {
            log.error(e);
        }
        return null;
    }
}

From source file:org.eclipse.smila.search.utils.advsearch.AdvSearchAccess.java

public static AdvSearchAccess getInstance() {
    final Log log = LogFactory.getLog(AdvSearchAccess.class);
    AdvSearchAccess[] types;
    try {
        types = getTypes();
        if (types.length != 1) {
            if (log.isWarnEnabled()) {
                log.warn("invalid index structure access count [" + types.length + "]");
            }
            return null;
        }
        return types[0];
    } catch (final AdvSearchException e) {
        if (log.isErrorEnabled()) {
            log.error(e);
        }
        return null;
    }
}

From source file:org.eclipse.smila.search.utils.indexstructure.IndexStructureAccess.java

public static IndexStructureAccess getInstance() {

    final Log log = LogFactory.getLog(IndexStructureAccess.class);

    IndexStructureAccess[] types;
    try {
        types = getTypes();
        if (types.length != 1) {
            if (log.isWarnEnabled()) {
                log.warn("invalid index structure access count [" + types.length + "]");
            }
            return null;
        }
        return types[0];
    } catch (final ISException e) {
        if (log.isErrorEnabled()) {
            log.error(e);
        }
        return null;
    }
}

From source file:org.eclipse.smila.search.utils.search.SearchAccess.java

public static SearchAccess getInstance() {

    final Log log = LogFactory.getLog(SearchAccess.class);

    SearchAccess[] types;
    try {
        types = getTypes();
        if (types.length != 1) {
            if (log.isWarnEnabled()) {
                log.warn("invalid index structure access count [" + types.length + "]");
            }
            return null;
        }
        return types[0];
    } catch (final DSearchException e) {
        if (log.isErrorEnabled()) {
            log.error(e);
        }
        return null;
    }
}

From source file:org.eclipse.smila.utils.scriptexecution.LogHelper.java

/**
 * Logs a message with the specified level.
 * 
 * @param log
 *          log
 * @param message
 *          message
 * @param logLevel
 *          log level
 */
private static void log(final Log log, final String message, final LogLevel logLevel) {
    if (LogLevel.DEBUG.equals(logLevel)) {
        log.debug(message);
    } else if (LogLevel.INFO.equals(logLevel)) {
        log.info(message);
    } else if (LogLevel.WARN.equals(logLevel)) {
        log.warn(message);
    } else if (LogLevel.ERROR.equals(logLevel)) {
        log.error(message);
    } else {
        throw new IllegalArgumentException("Unknown log level [" + logLevel + "]");
    }
}

From source file:org.elasticsearch.client.RequestLogger.java

/**
 * Logs a request that yielded a response.
 */
static void logResponse(Log logger, HttpUriRequest request, HttpHost host, HttpResponse httpResponse) {
    if (logger.isDebugEnabled()) {
        logger.debug("request [" + request.getMethod() + " " + host + getUri(request.getRequestLine())
                + "] returned [" + httpResponse.getStatusLine() + "]");
    }
    if (logger.isWarnEnabled()) {
        Header[] warnings = httpResponse.getHeaders("Warning");
        if (warnings != null && warnings.length > 0) {
            logger.warn(buildWarningMessage(request, host, warnings));
        }
    }
    if (tracer.isTraceEnabled()) {
        String requestLine;
        try {
            requestLine = buildTraceRequest(request, host);
        } catch (IOException e) {
            requestLine = "";
            tracer.trace("error while reading request for trace purposes", e);
        }
        String responseLine;
        try {
            responseLine = buildTraceResponse(httpResponse);
        } catch (IOException e) {
            responseLine = "";
            tracer.trace("error while reading response for trace purposes", e);
        }
        tracer.trace(requestLine + '\n' + responseLine);
    }
}

From source file:org.elasticsearch.hadoop.rest.InitializationUtils.java

public static <T> void saveSchemaIfNeeded(Object conf, ValueWriter<T> schemaWriter, T schema, Log log) {
    Settings settings = HadoopSettingsManager.loadFrom(conf);

    if (settings.getIndexAutoCreate()) {
        RestRepository client = new RestRepository(settings);
        if (!client.indexExists(false)) {
            if (schemaWriter == null) {
                log.warn(String.format(
                        "No mapping found [%s] and no schema found; letting Elasticsearch perform auto-mapping...",
                        settings.getResourceWrite()));
            } else {
                log.info(String.format("No mapping found [%s], creating one based on given schema",
                        settings.getResourceWrite()));
                ContentBuilder builder = ContentBuilder.generate(schemaWriter).value(schema).flush();
                BytesArray content = ((FastByteArrayOutputStream) builder.content()).bytes();
                builder.close();
                client.putMapping(content);
                if (log.isDebugEnabled()) {
                    log.debug(String.format("Creating ES mapping [%s] from schema [%s]", content.toString(),
                            schema));
                }
            }
        }
        client.close();
    }
}

From source file:org.elasticsearch.hadoop.rest.ShardSorter.java

public static Map<Shard, Node> find(List<List<Map<String, Object>>> targetShards, Map<String, Node> httpNodes,
        Log log) {
    // group the shards per node
    Map<Node, Set<Shard>> shardsPerNode = new LinkedHashMap<Node, Set<Shard>>();
    // nodes for each shard
    Map<SimpleShard, Set<Node>> nodesForShard = new LinkedHashMap<SimpleShard, Set<Node>>();

    // for each shard group
    for (List<Map<String, Object>> shardGroup : targetShards) {
        for (Map<String, Object> shardData : shardGroup) {
            Shard shard = new Shard(shardData);
            Node node = httpNodes.get(shard.getNode());
            if (node == null) {
                log.warn(String.format(
                        "Cannot find node with id [%s] (is HTTP enabled?) from shard [%s] in nodes [%s]; layout [%s]",
                        shard.getNode(), shard, httpNodes, targetShards));
                return Collections.emptyMap();
            }

            // node -> shards
            Set<Shard> shardSet = shardsPerNode.get(node);
            if (shardSet == null) {
                shardSet = new LinkedHashSet<Shard>();
                shardsPerNode.put(node, shardSet);
            }
            shardSet.add(shard);

            // shard -> nodes
            SimpleShard ss = SimpleShard.from(shard);
            Set<Node> nodeSet = nodesForShard.get(ss);
            if (nodeSet == null) {
                nodeSet = new LinkedHashSet<Node>();
                nodesForShard.put(ss, nodeSet);
            }
            nodeSet.add(node);
        }
    }

    return checkCombo(httpNodes.values(), shardsPerNode, targetShards.size());
}

From source file:org.elasticsearch.hadoop.serialization.dto.mapping.MappingUtils.java

public static void validateMapping(Collection<String> fields, Field mapping, FieldPresenceValidation validation,
        Log log) {
    if (mapping == null || fields == null || fields.isEmpty() || validation == null
            || FieldPresenceValidation.IGNORE == validation) {
        return;
    }

    List[] results = findTypos(fields, mapping);

    if (results == null) {
        return;
    }

    String message = String.format(
            "Field(s) [%s] not found in the Elasticsearch mapping specified; did you mean [%s]?",
            removeDoubleBrackets(results[0]), removeDoubleBrackets(results[1]));
    if (validation == FieldPresenceValidation.WARN) {
        log.warn(message);
    } else {
        throw new EsHadoopIllegalArgumentException(message);
    }
}