Example usage for org.apache.commons.logging Log isDebugEnabled

List of usage examples for org.apache.commons.logging Log isDebugEnabled

Introduction

On this page you can find example usage for org.apache.commons.logging Log isDebugEnabled.

Prototype

boolean isDebugEnabled();

Document

Is debug logging currently enabled?
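
As a quick illustration, here is a minimal, self-contained sketch of the guard pattern this method enables (the class name DebugGuardExample and the logged values are placeholders, not taken from any of the sources below):

import java.util.Arrays;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class DebugGuardExample {
    // Any class or string category can be passed to LogFactory.getLog().
    private static final Log LOG = LogFactory.getLog(DebugGuardExample.class);

    public static void main(String[] args) {
        int[] state = { 1, 2, 3 };
        // Guard the call so the (potentially expensive) message string
        // is only built when DEBUG logging is actually enabled.
        if (LOG.isDebugEnabled()) {
            LOG.debug("state=" + Arrays.toString(state));
        }
    }
}

Checking isDebugEnabled() before concatenating the message avoids the string-building cost when debug output is turned off, which is the pattern most of the examples below follow.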

Usage

From source file:org.apache.fop.logging.LoggingElementListObserver.java

/** @see org.apache.fop.layoutmgr.ElementListObserver.Observer */
public void observe(List elementList, String category, String id) {
    Log log = LogFactory.getLog(LoggingElementListObserver.class.getName() + "." + category);
    if (!log.isDebugEnabled()) {
        return;
    }
    log.debug(" ");
    int len = (elementList != null ? ElementListUtils.calcContentLength(elementList) : 0);
    log.debug("ElementList: category=" + category + ", id=" + id + ", len=" + len + "mpt");
    if (elementList == null) {
        log.debug("<<empty list>>");
        return;
    }
    ListIterator tempIter = elementList.listIterator();
    ListElement temp;
    while (tempIter.hasNext()) {
        temp = (ListElement) tempIter.next();
        if (temp.isBox()) {
            log.debug(tempIter.previousIndex() + ") " + temp);
        } else if (temp.isGlue()) {
            log.debug(tempIter.previousIndex() + ") " + temp);
        } else {
            log.debug(tempIter.previousIndex() + ") " + temp);
        }
        if (temp.getPosition() != null) {
            log.debug("            " + temp.getPosition());
        }
    }
    log.debug(" ");
}

From source file:org.apache.hadoop.fs.swift.util.SwiftUtils.java

public static void debug(Log log, String text, Object... args) {
    if (log.isDebugEnabled()) {
        log.debug(String.format(text, args));
    }
}
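
The helper above centralizes the guard: String.format only runs after isDebugEnabled() has returned true, so callers can pass a format string and arguments without paying the formatting cost when debug output is off. A hypothetical call site is sketched below (the class name, message, and values are illustrative, not taken from the Hadoop sources):

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.swift.util.SwiftUtils;

public class SwiftDebugCaller {
    public static void main(String[] args) {
        // Hypothetical caller; the message and values are illustrative only.
        Log log = LogFactory.getLog(SwiftDebugCaller.class);
        SwiftUtils.debug(log, "Read %s (%d bytes)", "container/object.dat", 4096);
    }
}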

From source file:org.apache.hadoop.service.LoggingStateChangeListener.java

/**
 * Log events to the given log
 * @param log destination for events
 */
public LoggingStateChangeListener(Log log) {
    //force an NPE if a null log came in
    log.isDebugEnabled();
    this.log = log;
}

From source file:org.apache.hadoop.yarn.applications.amonly.TestDistributedShell.java

@Test(timeout = 90000)
public void testDSShellWithCustomLogPropertyFile() throws Exception {
    final File basedir = new File("target", TestDistributedShell.class.getName());
    final File tmpDir = new File(basedir, "tmpDir");
    tmpDir.mkdirs();
    final File customLogProperty = new File(tmpDir, "custom_log4j.properties");
    if (customLogProperty.exists()) {
        customLogProperty.delete();
    }
    if (!customLogProperty.createNewFile()) {
        Assert.fail("Can not create custom log4j property file.");
    }
    PrintWriter fileWriter = new PrintWriter(customLogProperty);
    // set the output to DEBUG level
    fileWriter.write("log4j.rootLogger=debug,stdout");
    fileWriter.close();
    String[] args = { "--jar", APPMASTER_JAR, "--num_containers", "3", "--shell_command", "echo",
            "--shell_args", "HADOOP", "--log_properties", customLogProperty.getAbsolutePath(),
            "--master_memory", "512", "--master_vcores", "2", "--container_memory", "128", "--container_vcores",
            "1" };

    // Before the DS client runs, the default log level is INFO
    final Log LOG_Client = LogFactory.getLog(Client.class);
    Assert.assertTrue(LOG_Client.isInfoEnabled());
    Assert.assertFalse(LOG_Client.isDebugEnabled());
    final Log LOG_AM = LogFactory.getLog(ApplicationMaster.class);
    Assert.assertTrue(LOG_AM.isInfoEnabled());
    Assert.assertFalse(LOG_AM.isDebugEnabled());

    LOG.info("Initializing DS Client");
    final Client client = new Client(new Configuration(yarnCluster.getConfig()));
    boolean initSuccess = client.init(args);
    Assert.assertTrue(initSuccess);
    LOG.info("Running DS Client");
    boolean result = client.run();
    LOG.info("Client run completed. Result=" + result);
    Assert.assertTrue(verifyContainerLog(3, null, true, "DEBUG") > 10);
    // After the DS run has finished, the log level should be DEBUG
    Assert.assertTrue(LOG_Client.isInfoEnabled());
    Assert.assertTrue(LOG_Client.isDebugEnabled());
    Assert.assertTrue(LOG_AM.isInfoEnabled());
    Assert.assertTrue(LOG_AM.isDebugEnabled());
}

From source file:org.apache.hadoop.yarn.applications.distributedshell.TestDistributedShell.java

@Test
public void testDSShellWithCustomLogPropertyFile() throws Exception {
    final File basedir = new File("target", TestDistributedShell.class.getName());
    final File tmpDir = new File(basedir, "tmpDir");
    tmpDir.mkdirs();
    final File customLogProperty = new File(tmpDir, "custom_log4j.properties");
    if (customLogProperty.exists()) {
        customLogProperty.delete();
    }
    if (!customLogProperty.createNewFile()) {
        Assert.fail("Can not create custom log4j property file.");
    }
    PrintWriter fileWriter = new PrintWriter(customLogProperty);
    // set the output to DEBUG level
    fileWriter.write("log4j.rootLogger=debug,stdout");
    fileWriter.close();
    String[] args = { "--jar", APPMASTER_JAR, "--num_containers", "3", "--shell_command", "echo",
            "--shell_args", "HADOOP", "--log_properties", customLogProperty.getAbsolutePath(),
            "--master_memory", "512", "--master_vcores", "2", "--container_memory", "128", "--container_vcores",
            "1" };

    // Before the DS client runs, the default log level is INFO
    final Log LOG_Client = LogFactory.getLog(Client.class);
    Assert.assertTrue(LOG_Client.isInfoEnabled());
    Assert.assertFalse(LOG_Client.isDebugEnabled());
    final Log LOG_AM = LogFactory.getLog(ApplicationMaster.class);
    Assert.assertTrue(LOG_AM.isInfoEnabled());
    Assert.assertFalse(LOG_AM.isDebugEnabled());

    LOG.info("Initializing DS Client");
    final Client client = new Client(new Configuration(yarnCluster.getConfig()));
    boolean initSuccess = client.init(args);
    Assert.assertTrue(initSuccess);
    LOG.info("Running DS Client");
    boolean result = client.run();
    LOG.info("Client run completed. Result=" + result);
    Assert.assertTrue(verifyContainerLog(3, null, true, "DEBUG") > 10);
    // After the DS run has finished, the log level should be DEBUG
    Assert.assertTrue(LOG_Client.isInfoEnabled());
    Assert.assertTrue(LOG_Client.isDebugEnabled());
    Assert.assertTrue(LOG_AM.isInfoEnabled());
    Assert.assertTrue(LOG_AM.isDebugEnabled());
}

From source file:org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerUtils.java

public static boolean isBlacklisted(FiCaSchedulerApp application, FiCaSchedulerNode node, Log LOG) {
    if (application.isBlacklisted(node.getNodeName())) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Skipping 'host' " + node.getNodeName() + " for " + application.getApplicationId()
                    + " since it has been blacklisted");
        }
        return true;
    }

    if (application.isBlacklisted(node.getRackName())) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Skipping 'rack' " + node.getRackName() + " for " + application.getApplicationId()
                    + " since it has been blacklisted");
        }
        return true;
    }

    return false;
}

From source file:org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppUtils.java

public static boolean isPlaceBlacklisted(SchedulerApplicationAttempt application, SchedulerNode node, Log log) {
    if (application.isPlaceBlacklisted(node.getNodeName())) {
        if (log.isDebugEnabled()) {
            log.debug("Skipping 'host' " + node.getNodeName() + " for " + application.getApplicationId()
                    + " since it has been blacklisted");
        }
        return true;
    }

    if (application.isPlaceBlacklisted(node.getRackName())) {
        if (log.isDebugEnabled()) {
            log.debug("Skipping 'rack' " + node.getRackName() + " for " + application.getApplicationId()
                    + " since it has been blacklisted");
        }
        return true;
    }

    return false;
}

From source file:org.apache.hawq.pxf.service.rest.AnalyzerResource.java

@GET
@Path("getEstimatedStats")
@Produces("application/json")
public Response getEstimatedStats(@Context ServletContext servletContext, @Context final HttpHeaders headers,
        @QueryParam("path") String path) throws Exception {

    if (Log.isDebugEnabled()) {
        StringBuilder startmsg = new StringBuilder(
                "ANALYZER/getEstimatedStats started for path \"" + path + "\"");
        if (headers != null) {
            for (String header : headers.getRequestHeaders().keySet()) {
                startmsg.append(" Header: ").append(header).append(" Value: ")
                        .append(headers.getRequestHeader(header));
            }
        }
        Log.debug(startmsg);
    }

    /* Convert headers into a regular map */
    Map<String, String> params = convertToCaseInsensitiveMap(headers.getRequestHeaders());

    /* Store protocol level properties and verify */
    final ProtocolData protData = new ProtocolData(params);
    SecuredHDFS.verifyToken(protData, servletContext);

    /*
     * Analyzer is a special case in which it is hard to tell if user didn't
     * specify one, or specified a profile that doesn't include one, or it's
     * an actual protocol violation. Since we can only test protocol level
     * logic, we assume (like before) that it's a user error, which is the
     * case in most likelihood. When analyzer module is removed in the near
     * future, this assumption will go away with it.
     */
    if (protData.getAnalyzer() == null) {
        throw new IllegalArgumentException(
                "PXF 'Analyzer' class was not found. Please supply it in the LOCATION clause or use it in a PXF profile in order to run ANALYZE on this table");
    }

    /* Create an analyzer instance with API level parameters */
    final Analyzer analyzer = AnalyzerFactory.create(protData);

    /*
     * Function queries the pxf Analyzer for the data fragments of the resource.
     * The fragments are returned in a string formatted in JSON.
     */
    String jsonOutput = AnalyzerStats.dataToJSON(analyzer.getEstimatedStats(path));

    return Response.ok(jsonOutput, MediaType.APPLICATION_JSON_TYPE).build();
}

From source file:org.apache.james.transport.mailets.jsieve.FileIntoAction.java

/**
 * <p>
 * Executes the passed ActionFileInto.
 * </p>
 * 
 * <p>
 * This implementation accepts any destination with the root of <code>INBOX</code>.
 * </p>
 * 
 * <p>
 * As the current POP3 server does not support sub-folders, the mail is
 * stored in the INBOX for the recipient of the mail and the full intended
 * destination added as a prefix to the message's subject.
 * </p>
 * 
 * <p>
 * When IMAP support is added to James, it will be possible to support
 * sub-folders of <code>INBOX</code> fully.
 * </p>
 * 
 * @param anAction
 * @param aMail
 * @param context not null
 * @throws MessagingException
 */
public void execute(ActionFileInto anAction, Mail aMail, final ActionContext context)
        throws MessagingException {
    String destinationMailbox = anAction.getDestination();
    MailAddress recipient;
    boolean delivered = false;
    try {
        recipient = ActionUtils.getSoleRecipient(aMail);

        if (!(destinationMailbox.length() > 0 && destinationMailbox.charAt(0) == HIERARCHY_DELIMITER)) {
            destinationMailbox = HIERARCHY_DELIMITER + destinationMailbox;
        }

        final String mailbox = destinationMailbox.replace(HIERARCHY_DELIMITER, '/');
        final String url = "mailbox://" + recipient.asString() + mailbox;
        //TODO: copying this message so many times seems a waste
        context.post(url, aMail);
        delivered = true;
    } catch (MessagingException ex) {
        final Log log = context.getLog();
        if (log.isDebugEnabled()) {
            log.debug("Error while storing mail into. " + destinationMailbox, ex);
        }
        throw ex;
    }
    if (delivered) {
        final Log log = context.getLog();
        if (log.isDebugEnabled()) {
            log.debug("Filed Message ID: " + aMail.getMessage().getMessageID() + " into destination: \""
                    + destinationMailbox + "\"");
        }
    }
}

From source file:org.apache.james.transport.mailets.jsieve.RedirectAction.java

/**
 * Method execute executes the passed ActionRedirect.
 *
 * @param anAction not null
 * @param aMail not null
 * @param context not null
 * @throws MessagingException
 */
public void execute(ActionRedirect anAction, Mail aMail, ActionContext context) throws MessagingException {
    ActionUtils.detectAndHandleLocalLooping(aMail, context, "redirect");
    Collection<MailAddress> recipients = new ArrayList<MailAddress>(1);
    recipients.add(new MailAddress(new InternetAddress(anAction.getAddress())));
    MailAddress sender = aMail.getSender();
    context.post(sender, recipients, aMail.getMessage());
    Log log = context.getLog();
    if (log.isDebugEnabled()) {
        log.debug("Redirected Message ID: " + aMail.getMessage().getMessageID() + " to \""
                + anAction.getAddress() + "\"");
    }
}