Example usage for org.apache.commons.logging Log isInfoEnabled

List of usage examples for org.apache.commons.logging Log isInfoEnabled

Introduction

On this page you can find example usage of org.apache.commons.logging Log isInfoEnabled.

Prototype

boolean isInfoEnabled();

Source Link

Document

Is info logging currently enabled?

Usage

From source file:org.apache.cayenne.dbsync.reverse.dbimport.DefaultDbImportActionTest.java

/**
 * Builds a {@link DefaultDbImportAction} wired with mocked collaborators,
 * overriding createDbLoader so the supplied loader is always used.
 *
 * @param projectSaver the (mocked) project saver passed through to the action
 * @param mapLoader the (mocked) map loader passed through to the action
 * @param dbLoader the loader returned by the overridden createDbLoader
 * @return an action instance ready for test use
 * @throws Exception if mock wiring fails
 */
private DefaultDbImportAction buildDbImportAction(FileProjectSaver projectSaver, MapLoader mapLoader,
        final DbLoader dbLoader) throws Exception {

    // Logger mock with both levels enabled so logging code paths are exercised.
    Log logger = mock(Log.class);
    when(logger.isInfoEnabled()).thenReturn(true);
    when(logger.isDebugEnabled()).thenReturn(true);

    // Adapter factory always hands back the same mocked adapter.
    DbAdapter mockAdapter = mock(DbAdapter.class);
    DbAdapterFactory adapterFactory = mock(DbAdapterFactory.class);
    when(adapterFactory.createAdapter(any(DataNodeDescriptor.class), any(DataSource.class)))
            .thenReturn(mockAdapter);

    // DataSource factory always hands back the same mocked data source.
    DataSource mockDataSource = mock(DataSource.class);
    DataSourceFactory dataSourceFactory = mock(DataSourceFactory.class);
    when(dataSourceFactory.getDataSource(any(DataNodeDescriptor.class))).thenReturn(mockDataSource);

    // Real token factory served through a mocked provider.
    MergerTokenFactoryProvider mergerTokenFactoryProvider = mock(MergerTokenFactoryProvider.class);
    when(mergerTokenFactoryProvider.get(any(DbAdapter.class))).thenReturn(new DefaultMergerTokenFactory());

    return new DefaultDbImportAction(logger, projectSaver, dataSourceFactory, adapterFactory, mapLoader,
            mergerTokenFactoryProvider) {

        // Force the test's loader instead of building one from the connection.
        protected DbLoader createDbLoader(DbAdapter adapter, Connection connection,
                DbImportConfiguration config) {
            return dbLoader;
        }
    };
}

From source file:org.apache.cayenne.tools.dbimport.DbImportActionTest.java

/**
 * Verifies that an import which discovers no changes (the loaded DB schema
 * matches the existing DataMap exactly) never saves the project, while the
 * existing map is still loaded once for comparison.
 */
@Test
public void testImportWithoutChanges() throws Exception {
    // Stubbed loader that always reports the same single-entity schema.
    DbLoader dbLoader = new DbLoader(null, null, null) {
        @Override
        public void load(DataMap dataMap, DbLoaderConfiguration config) throws SQLException {
            new DataMapBuilder(dataMap)
                    .with(dbEntity("ARTGROUP").attributes(dbAttr("NAME").typeVarchar(100).mandatory()));
        }

        @Override
        public String[] getDefaultTableTypes() {
            return null;
        }
    };

    DbImportConfiguration params = mock(DbImportConfiguration.class);
    when(params.createLoader(any(DbAdapter.class), any(Connection.class), any(DbLoaderDelegate.class)))
            .thenReturn(dbLoader);

    when(params.createDataMap()).thenReturn(new DataMap("testImport"));
    when(params.getDataMapFile()).thenReturn(FILE_STUB);
    when(params.createMergeDelegate()).thenReturn(new DefaultModelMergeDelegate());
    when(params.getDbLoaderConfig()).thenReturn(new DbLoaderConfiguration());

    // Logging disabled so no-change detection is not masked by log side effects.
    Log log = mock(Log.class);
    when(log.isDebugEnabled()).thenReturn(false);
    when(log.isInfoEnabled()).thenReturn(false);

    FileProjectSaver projectSaver = mock(FileProjectSaver.class);
    doNothing().when(projectSaver).save(any(Project.class));

    MapLoader mapLoader = mock(MapLoader.class);
    // when/thenReturn replaces the deprecated Mockito stub()/toReturn() API,
    // matching the stubbing style used throughout the rest of this test.
    when(mapLoader.loadDataMap(any(InputSource.class))).thenReturn(new DataMapBuilder()
            .with(dbEntity("ARTGROUP").attributes(dbAttr("NAME").typeVarchar(100).mandatory())).build());

    DbImportAction action = buildDbImportAction(log, projectSaver, mapLoader);

    action.execute(params);

    // No changes detected: nothing is saved, but the existing map was read once.
    verify(projectSaver, never()).save(any(Project.class));
    verify(mapLoader, times(1)).loadDataMap(any(InputSource.class));
}

From source file:org.apache.cayenne.tools.dbimport.DbImportActionTest.java

/**
 * Convenience overload: builds a mock Log with both debug and info levels
 * enabled, then delegates to the three-argument builder.
 *
 * @param projectSaver the (mocked) project saver passed through
 * @param mapLoader the (mocked) map loader passed through
 * @return the action produced by the delegate builder
 * @throws Exception if mock wiring fails
 */
private DbImportAction buildDbImportAction(FileProjectSaver projectSaver, MapLoader mapLoader)
        throws Exception {
    Log logger = mock(Log.class);
    when(logger.isInfoEnabled()).thenReturn(true);
    when(logger.isDebugEnabled()).thenReturn(true);

    return buildDbImportAction(logger, projectSaver, mapLoader);
}

From source file:org.apache.cocoon.servlet.RequestUtil.java

/**
 * Delegates to the full manageException overload, translating the logger
 * into its current info-enabled flag (the delegate takes a boolean verbosity
 * switch instead of a logger).
 *
 * @throws IOException if the delegate fails while writing the response
 */
public static void manageException(HttpServletRequest req, HttpServletResponse res, Environment env, String uri,
        int errorStatus, String title, String message, String description, Exception e,
        ServletSettings settings, Log logger, Object sender) throws IOException {
    final boolean verbose = logger.isInfoEnabled();
    manageException(req, res, env, uri, errorStatus, title, message, description, e, settings, verbose,
            sender);
}

From source file:org.apache.flink.runtime.util.EnvironmentInformation.java

/**
 * Logs a startup banner with version, user, JVM and memory details for the
 * given component. All work, including environment inspection, is skipped
 * when INFO logging is disabled.
 *
 * @param log the logger the banner is written to
 * @param componentName the component name printed in the banner
 */
public static void logEnvironmentInfo(Log log, String componentName) {
    if (!log.isInfoEnabled()) {
        return;
    }

    RevisionInformation revision = getRevisionInformation();
    String javaHome = System.getenv("JAVA_HOME");

    log.info("-------------------------------------------------------");
    log.info(" Starting " + componentName + " (Version: " + getVersion() + ", Rev:" + revision.commitId
            + ", Date:" + revision.commitDate + ")");
    log.info(" Current user: " + getUserRunning());
    log.info(" JVM: " + getJvmVersion());
    log.info(" Startup Options: " + getJvmStartupOptions());
    log.info(" Maximum heap size: " + getMaxJvmMemory() + " MiBytes");
    log.info(" JAVA_HOME: " + (javaHome == null ? "not set" : javaHome));
    log.info("-------------------------------------------------------");
}

From source file:org.apache.hadoop.hbase.util.ReflectionUtils.java

/**
 * Log the current thread stacks at INFO level, rate-limited so that a dump
 * is produced at most once per minInterval seconds.
 * @param log the logger that logs the stack trace
 * @param title a descriptive title for the call stacks
 * @param minInterval the minimum number of seconds since the last dump
 */
public static void logThreadInfo(Log log, String title, long minInterval) {
    boolean dumpStack = false;
    if (log.isInfoEnabled()) {
        synchronized (ReflectionUtils.class) {
            // previousLogTime is shared state; only advance it when a dump is due.
            long now = System.currentTimeMillis();
            if (now - previousLogTime >= minInterval * 1000) {
                previousLogTime = now;
                dumpStack = true;
            }
        }
        if (dumpStack) {
            try {
                ByteArrayOutputStream buffer = new ByteArrayOutputStream();
                printThreadInfo(new PrintStream(buffer, false, "UTF-8"), title);
                // Decode with the same UTF-8 charset the PrintStream encoded with.
                // The original decoded with Charset.defaultCharset(), which garbles
                // non-ASCII thread names on platforms whose default is not UTF-8.
                log.info(buffer.toString("UTF-8"));
            } catch (UnsupportedEncodingException ignored) {
                log.warn("Could not write thread info about '" + title + "' due to a string encoding issue.");
            }
        }
    }
}

From source file:org.apache.hadoop.util.ReflectionUtils.java

/**
 * Logs the current thread stacks at INFO level, rate-limited so that a dump
 * is produced at most once per minInterval seconds.
 *
 * @param log the logger that the stack trace is written to
 * @param title a descriptive title placed above the call stacks
 * @param minInterval the minimum number of seconds since the previous dump
 */
public static void logThreadInfo(Log log, String title, long minInterval) {
    if (!log.isInfoEnabled()) {
        return;
    }
    boolean shouldDump = false;
    synchronized (ReflectionUtils.class) {
        // previousLogTime is shared state; only advance it when a dump is due.
        long now = Time.now();
        if (now - previousLogTime >= minInterval * 1000) {
            previousLogTime = now;
            shouldDump = true;
        }
    }
    if (shouldDump) {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        // NOTE(review): assumes printThreadInfo flushes the PrintWriter before
        // returning — PrintWriter buffers internally, so an unflushed writer
        // would leave the buffer partially empty. TODO confirm in printThreadInfo.
        printThreadInfo(new PrintWriter(buffer), title);
        log.info(buffer.toString());
    }
}

From source file:org.apache.hadoop.yarn.applications.amonly.TestDistributedShell.java

/**
 * Verifies that a custom log4j properties file passed via --log_properties
 * switches both the client and the ApplicationMaster from the default INFO
 * level to DEBUG for the duration of the distributed-shell run.
 */
@Test(timeout = 90000)
public void testDSShellWithCustomLogPropertyFile() throws Exception {
    final File basedir = new File("target", TestDistributedShell.class.getName());
    final File tmpDir = new File(basedir, "tmpDir");
    tmpDir.mkdirs();
    final File customLogProperty = new File(tmpDir, "custom_log4j.properties");
    if (customLogProperty.exists()) {
        customLogProperty.delete();
    }
    if (!customLogProperty.createNewFile()) {
        Assert.fail("Can not create custom log4j property file.");
    }
    // try-with-resources guarantees the writer is closed even if write() throws;
    // the original leaked the PrintWriter on that path.
    try (PrintWriter fileWriter = new PrintWriter(customLogProperty)) {
        // set the output to DEBUG level
        fileWriter.write("log4j.rootLogger=debug,stdout");
    }
    String[] args = { "--jar", APPMASTER_JAR, "--num_containers", "3", "--shell_command", "echo",
            "--shell_args", "HADOOP", "--log_properties", customLogProperty.getAbsolutePath(),
            "--master_memory", "512", "--master_vcores", "2", "--container_memory", "128", "--container_vcores",
            "1" };

    //Before run the DS, the default the log level is INFO
    final Log LOG_Client = LogFactory.getLog(Client.class);
    Assert.assertTrue(LOG_Client.isInfoEnabled());
    Assert.assertFalse(LOG_Client.isDebugEnabled());
    final Log LOG_AM = LogFactory.getLog(ApplicationMaster.class);
    Assert.assertTrue(LOG_AM.isInfoEnabled());
    Assert.assertFalse(LOG_AM.isDebugEnabled());

    LOG.info("Initializing DS Client");
    final Client client = new Client(new Configuration(yarnCluster.getConfig()));
    boolean initSuccess = client.init(args);
    Assert.assertTrue(initSuccess);
    LOG.info("Running DS Client");
    boolean result = client.run();
    LOG.info("Client run completed. Result=" + result);
    Assert.assertTrue(verifyContainerLog(3, null, true, "DEBUG") > 10);
    //After DS is finished, the log level should be DEBUG
    Assert.assertTrue(LOG_Client.isInfoEnabled());
    Assert.assertTrue(LOG_Client.isDebugEnabled());
    Assert.assertTrue(LOG_AM.isInfoEnabled());
    Assert.assertTrue(LOG_AM.isDebugEnabled());
}

From source file:org.apache.hadoop.yarn.applications.distributedshell.TestDistributedShell.java

/**
 * Verifies that a custom log4j properties file passed via --log_properties
 * switches both the client and the ApplicationMaster from the default INFO
 * level to DEBUG for the duration of the distributed-shell run.
 */
@Test
public void testDSShellWithCustomLogPropertyFile() throws Exception {
    final File basedir = new File("target", TestDistributedShell.class.getName());
    final File tmpDir = new File(basedir, "tmpDir");
    tmpDir.mkdirs();
    final File customLogProperty = new File(tmpDir, "custom_log4j.properties");
    if (customLogProperty.exists()) {
        customLogProperty.delete();
    }
    if (!customLogProperty.createNewFile()) {
        Assert.fail("Can not create custom log4j property file.");
    }
    // try-with-resources guarantees the writer is closed even if write() throws;
    // the original leaked the PrintWriter on that path.
    try (PrintWriter fileWriter = new PrintWriter(customLogProperty)) {
        // set the output to DEBUG level
        fileWriter.write("log4j.rootLogger=debug,stdout");
    }
    String[] args = { "--jar", APPMASTER_JAR, "--num_containers", "3", "--shell_command", "echo",
            "--shell_args", "HADOOP", "--log_properties", customLogProperty.getAbsolutePath(),
            "--master_memory", "512", "--master_vcores", "2", "--container_memory", "128", "--container_vcores",
            "1" };

    //Before run the DS, the default the log level is INFO
    final Log LOG_Client = LogFactory.getLog(Client.class);
    Assert.assertTrue(LOG_Client.isInfoEnabled());
    Assert.assertFalse(LOG_Client.isDebugEnabled());
    final Log LOG_AM = LogFactory.getLog(ApplicationMaster.class);
    Assert.assertTrue(LOG_AM.isInfoEnabled());
    Assert.assertFalse(LOG_AM.isDebugEnabled());

    LOG.info("Initializing DS Client");
    final Client client = new Client(new Configuration(yarnCluster.getConfig()));
    boolean initSuccess = client.init(args);
    Assert.assertTrue(initSuccess);
    LOG.info("Running DS Client");
    boolean result = client.run();
    LOG.info("Client run completed. Result=" + result);
    Assert.assertTrue(verifyContainerLog(3, null, true, "DEBUG") > 10);
    //After DS is finished, the log level should be DEBUG
    Assert.assertTrue(LOG_Client.isInfoEnabled());
    Assert.assertTrue(LOG_Client.isDebugEnabled());
    Assert.assertTrue(LOG_AM.isInfoEnabled());
    Assert.assertTrue(LOG_AM.isDebugEnabled());
}

From source file:org.apache.jsieve.ScriptCoordinate.java

/**
 * Logs diagnostic information about this script coordinate (its start and
 * end line/column positions) at INFO level.
 *
 * @param logger
 *            <code>Log</code>, not null
 */
public void logDiagnosticsInfo(Log logger) {
    if (!logger.isInfoEnabled()) {
        return;
    }
    logger.info("Expression starts line " + startLineNumber + " column " + startColumnNumber);
    logger.info("Expression ends line " + endLineNumber + " column " + endColumnNumber);
}