List of usage examples for the org.apache.commons.logging Log#info(Object) method
void info(Object message);
From source file:org.apache.logging.log4j.jcl.LoggerTest.java
/**
 * Verifies that messages logged through the JCL {@link Log} facade reach the
 * underlying "List" appender with the expected logger name, message text, and
 * MDC suffix at both debug and info levels.
 */
@Test
public void testLog() {
    final Log logger = LogFactory.getLog("LoggerTest");
    final String eol = Strings.LINE_SEPARATOR;

    // Plain debug message.
    logger.debug("Test message");
    verify("List", "o.a.l.l.j.LoggerTest Test message MDC{}" + eol);

    // Debug message with an attached throwable.
    logger.debug("Exception: ", new NullPointerException("Test"));
    verify("List", "o.a.l.l.j.LoggerTest Exception: MDC{}" + eol);

    // Plain info message.
    logger.info("Info Message");
    verify("List", "o.a.l.l.j.LoggerTest Info Message MDC{}" + eol);

    // A literal "{}" must pass through untouched — JCL has no parameterized logging.
    logger.info("Info Message {}");
    verify("List", "o.a.l.l.j.LoggerTest Info Message {} MDC{}" + eol);
}
From source file:org.apache.pig.impl.plan.CompilationMessageCollector.java
/**
 * Routes a compilation message to the severity-matching method on the given log.
 * Message types other than Info, Warning, and Error are silently ignored, exactly
 * as the original switch (with no default case) behaved.
 *
 * @param messageString the text to log
 * @param messageType   severity bucket selecting info/warn/error
 * @param log           destination commons-logging log
 */
private static void logMessage(String messageString, MessageType messageType, Log log) {
    if (messageType == MessageType.Info) {
        log.info(messageString);
    } else if (messageType == MessageType.Warning) {
        log.warn(messageString);
    } else if (messageType == MessageType.Error) {
        log.error(messageString);
    }
}
From source file:org.apache.tajo.engine.planner.physical.PhysicalExec.java
/**
 * Logs an informational message prefixed with this executor's task id so that
 * log lines can be attributed to a specific task.
 *
 * @param log     destination log
 * @param message text to emit after the "[taskId] " prefix
 */
protected void info(Log log, String message) {
    final String taskPrefix = "[" + context.getTaskId() + "] ";
    log.info(taskPrefix + message);
}
From source file:org.apache.tajo.querymaster.DefaultTaskScheduler.java
/**
 * Logs an informational message prefixed with this scheduler's stage id.
 * Plain concatenation is equivalent to the original {@code String.format("[%s] %s", ...)}:
 * both render the id via {@code String.valueOf}.
 *
 * @param log     destination log
 * @param message text to emit after the "[stageId] " prefix
 */
protected void info(Log log, String message) {
    log.info("[" + stage.getId() + "] " + message);
}
From source file:org.apache.tajo.util.StringUtils.java
/**
 * Logs a multi-line STARTUP_MSG banner for a starting server and registers a
 * shutdown hook that logs a matching SHUTDOWN_MSG banner when the JVM exits.
 * On UNIX it also (best-effort) installs signal loggers.
 *
 * @param clazz the class of the server; its simple name appears in the banner
 * @param args  command-line arguments echoed into the banner
 * @param LOG   the target log object (also passed to the signal logger)
 */
public static void startupShutdownMessage(Class<?> clazz, String[] args,
        final org.apache.commons.logging.Log LOG) {
    final String hostname = org.apache.hadoop.net.NetUtils.getHostname();
    final String classname = clazz.getSimpleName();
    // Banner lines: class, host, args, version/classpath/build info, and JVM version.
    LOG.info(toStartupShutdownString("STARTUP_MSG: ", new String[] { "Starting " + classname,
            " host = " + hostname, " args = " + Arrays.asList(args),
            " version = " + org.apache.tajo.util.VersionInfo.getVersion(),
            " classpath = " + System.getProperty("java.class.path"),
            " build = " + org.apache.tajo.util.VersionInfo.getUrl() + " -r "
                    + org.apache.tajo.util.VersionInfo.getRevision() + "; compiled by '"
                    + org.apache.tajo.util.VersionInfo.getUser() + "' on "
                    + org.apache.tajo.util.VersionInfo.getDate(),
            " java = " + System.getProperty("java.version") }));
    if (SystemUtils.IS_OS_UNIX) {
        try {
            SignalLogger.INSTANCE.register(LOG);
        } catch (Throwable t) {
            // Signal logging is best-effort; never let it abort startup.
            LOG.warn("failed to register any UNIX signal loggers: ", t);
        }
    }
    // Anonymous class (not a lambda) so the hook captures classname/hostname finals.
    ShutdownHookManager.get().addShutdownHook(new Runnable() {
        @Override
        public void run() {
            LOG.info(toStartupShutdownString("SHUTDOWN_MSG: ",
                    new String[] { "Shutting down " + classname + " at " + hostname }));
        }
    }, SHUTDOWN_HOOK_PRIORITY);
}
From source file:org.apache.tez.mapreduce.examples.TezExampleBase.java
/**
 * Submits the given DAG on the shared Tez client and blocks until it completes.
 *
 * @param dag           the dag to execute
 * @param printCounters whether DAG counters should be fetched along with status
 * @param logger        the logger to use while printing diagnostics
 * @return 0 when the DAG succeeds, -1 otherwise
 * @throws TezException
 * @throws InterruptedException
 * @throws IOException
 */
public int runDag(DAG dag, boolean printCounters, Log logger)
        throws TezException, InterruptedException, IOException {
    tezClientInternal.waitTillReady();
    DAGClient dagClient = tezClientInternal.submitDAG(dag);

    Set<StatusGetOpts> statusOpts = Sets.newHashSet();
    if (printCounters) {
        statusOpts.add(StatusGetOpts.GET_COUNTERS);
    }

    DAGStatus finalStatus = dagClient.waitForCompletionWithStatusUpdates(statusOpts);
    if (finalStatus.getState() == DAGStatus.State.SUCCEEDED) {
        return 0;
    }
    // Surface the failure reason before signalling the error to the caller.
    logger.info("DAG diagnostics: " + finalStatus.getDiagnostics());
    return -1;
}
From source file:org.apache.tez.mapreduce.hadoop.mapred.MRCounters.java
@Override public void log(Log log) { log.info("Counters: " + size()); for (Group group : this) { log.info(" " + group.getDisplayName()); for (Counter counter : group) { log.info(" " + counter.getDisplayName() + "=" + counter.getCounter()); }//ww w. j av a 2 s. c o m } }
From source file:org.apache.tez.runtime.library.common.shuffle.ShuffleUtils.java
public static void shuffleToMemory(byte[] shuffleData, InputStream input, int decompressedLength, int compressedLength, CompressionCodec codec, boolean ifileReadAhead, int ifileReadAheadLength, Log LOG, String identifier) throws IOException { try {//from www. j a v a 2s. c o m IFile.Reader.readToMemory(shuffleData, input, compressedLength, codec, ifileReadAhead, ifileReadAheadLength); // metrics.inputBytes(shuffleData.length); LOG.info("Read " + shuffleData.length + " bytes from input for " + identifier); } catch (IOException ioe) { // Close the streams IOUtils.cleanup(LOG, input); // Re-throw throw ioe; } }
From source file:org.apache.tez.runtime.library.common.shuffle.ShuffleUtils.java
public static void shuffleToDisk(OutputStream output, String hostIdentifier, InputStream input, long compressedLength, Log LOG, String identifier) throws IOException { // Copy data to local-disk long bytesLeft = compressedLength; try {//w ww.j av a2s . c o m final int BYTES_TO_READ = 64 * 1024; byte[] buf = new byte[BYTES_TO_READ]; while (bytesLeft > 0) { int n = input.read(buf, 0, (int) Math.min(bytesLeft, BYTES_TO_READ)); if (n < 0) { throw new IOException("read past end of stream reading " + identifier); } output.write(buf, 0, n); bytesLeft -= n; // metrics.inputBytes(n); } LOG.info("Read " + (compressedLength - bytesLeft) + " bytes from input for " + identifier); output.close(); } catch (IOException ioe) { // Close the streams IOUtils.cleanup(LOG, input, output); // Re-throw throw ioe; } // Sanity check if (bytesLeft != 0) { throw new IOException("Incomplete map output received for " + identifier + " from " + hostIdentifier + " (" + bytesLeft + " bytes missing of " + compressedLength + ")"); } }
From source file:org.apache.tez.runtime.library.shuffle.common.ShuffleUtils.java
@SuppressWarnings("resource") public static void shuffleToMemory(MemoryFetchedInput fetchedInput, InputStream input, int decompressedLength, int compressedLength, CompressionCodec codec, boolean ifileReadAhead, int ifileReadAheadLength, Log LOG) throws IOException { IFileInputStream checksumIn = new IFileInputStream(input, compressedLength, ifileReadAhead, ifileReadAheadLength);/*from w w w. j a v a 2s.c o m*/ input = checksumIn; // Are map-outputs compressed? if (codec != null) { Decompressor decompressor = CodecPool.getDecompressor(codec); decompressor.reset(); input = codec.createInputStream(input, decompressor); } // Copy map-output into an in-memory buffer byte[] shuffleData = fetchedInput.getBytes(); try { IOUtils.readFully(input, shuffleData, 0, shuffleData.length); // metrics.inputBytes(shuffleData.length); LOG.info("Read " + shuffleData.length + " bytes from input for " + fetchedInput.getInputAttemptIdentifier()); } catch (IOException ioe) { // Close the streams IOUtils.cleanup(LOG, input); // Re-throw throw ioe; } }