List of usage examples for org.apache.commons.logging Log.trace
void trace(Object message);
void trace(Object message, Throwable t);
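Before the real-world examples, a minimal sketch of the typical calling pattern (the class and method names here are made up for illustration): obtain a Log from LogFactory, guard message construction with isTraceEnabled(), and use the two-argument overload to attach a Throwable.

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class MyService { // hypothetical class for this sketch
    private static final Log log = LogFactory.getLog(MyService.class);

    public void handle(String id) {
        // Guard string concatenation so it is skipped when TRACE is disabled
        if (log.isTraceEnabled()) {
            log.trace("Handling request id=" + id);
        }
        try {
            // ... actual work ...
        } catch (RuntimeException e) {
            // Two-argument overload logs the message together with the stack trace
            log.trace("Request " + id + " failed", e);
            throw e;
        }
    }
}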
From source file:dk.netarkivet.common.utils.arc.ARCBatchJob.java
/**
 * Accepts only ARC and ARCGZ files. Runs through all records and calls
 * processRecord() on every record that is allowed by getFilter().
 * Does nothing on a non-ARC file.
 *
 * @param arcFile The ARC or ARCGZ file to be processed.
 * @param os the OutputStream to which output is to be written
 * @throws ArgumentNotValid if either argument is null
 * @return true, if the file was processed successfully, otherwise false
 */
@Override
public final boolean processFile(File arcFile, OutputStream os) throws ArgumentNotValid {
    ArgumentNotValid.checkNotNull(arcFile, "arcFile");
    ArgumentNotValid.checkNotNull(os, "os");
    Log log = LogFactory.getLog(getClass().getName());
    long arcFileIndex = 0;
    boolean success = true;
    log.info("Processing ARCfile: " + arcFile.getName());
    try { // This outer try-catch block catches all unexpected exceptions
        // Create an ARCReader and retrieve its Iterator:
        ARCReader arcReader = null;
        try {
            arcReader = ARCReaderFactory.get(arcFile);
        } catch (IOException e) { // Some IOException
            handleException(e, arcFile, arcFileIndex);
            return false; // Can't process file after exception
        }
        try {
            Iterator<? extends ArchiveRecord> it = arcReader.iterator();
            /* Process all records from this Iterator: */
            log.debug("Starting processing records in ARCfile '" + arcFile.getName() + "'.");
            if (!it.hasNext()) {
                log.debug("No ARCRecords found in ARCfile '" + arcFile.getName() + "'.");
            }
            ARCRecord record = null;
            while (it.hasNext()) {
                log.trace("At begin of processing-loop");
                // Get a record from the file
                record = (ARCRecord) it.next();
                // Process with the job
                try {
                    if (!getFilter().accept(record)) {
                        continue;
                    }
                    log.debug("Processing ARCRecord #" + noOfRecordsProcessed
                            + " in ARCfile '" + arcFile.getName() + "'.");
                    processRecord(record, os);
                    ++noOfRecordsProcessed;
                } catch (NetarkivetException e) {
                    // Our exceptions don't stop us
                    success = false;
                    // With our exceptions, we assume that just the
                    // processing of this record got stopped, and we can
                    // easily find the next
                    handleOurException(e, arcFile, arcFileIndex);
                } catch (Exception e) {
                    success = false; // Strange exceptions do stop us
                    handleException(e, arcFile, arcFileIndex);
                    // With strange exceptions, we don't know
                    // if we've skipped records
                    break;
                }
                // Close the record
                try {
                    long arcRecordOffset = record.getBodyOffset() + record.getMetaData().getLength();
                    record.close();
                    arcFileIndex = arcRecordOffset;
                } catch (IOException ioe) { // Couldn't close an ARCRecord
                    success = false;
                    handleException(ioe, arcFile, arcFileIndex);
                    // If close fails, we don't know if we've skipped records
                    break;
                }
                log.trace("At end of processing-loop");
            }
        } finally {
            try {
                arcReader.close();
            } catch (IOException e) { // Some IOException
                // TODO Discuss whether exceptions on close cause filesFailed addition
                handleException(e, arcFile, arcFileIndex);
            }
        }
    } catch (Exception unexpectedException) {
        handleException(unexpectedException, arcFile, arcFileIndex);
        return false;
    }
    return success;
}
From source file:dk.netarkivet.common.utils.warc.WARCBatchJob.java
/**
 * Accepts only WARC and WARCGZ files. Runs through all records and calls
 * processRecord() on every record that is allowed by getFilter().
 * Does nothing on a non-WARC file.
 *
 * @param warcFile The WARC or WARCGZ file to be processed.
 * @param os the OutputStream to which output is to be written
 * @throws ArgumentNotValid if either argument is null
 * @return true, if the file was processed successfully, otherwise false
 */
public final boolean processFile(File warcFile, OutputStream os) throws ArgumentNotValid {
    ArgumentNotValid.checkNotNull(warcFile, "warcFile");
    ArgumentNotValid.checkNotNull(os, "os");
    Log log = LogFactory.getLog(getClass().getName());
    long arcFileIndex = 0;
    boolean success = true;
    log.info("Processing WARCfile: " + warcFile.getName());
    try { // This outer try-catch block catches all unexpected exceptions
        // Create a WARCReader and retrieve its Iterator:
        WARCReader warcReader = null;
        try {
            warcReader = WARCReaderFactory.get(warcFile);
        } catch (IOException e) { // Some IOException
            handleException(e, warcFile, arcFileIndex);
            return false; // Can't process file after exception
        }
        try {
            Iterator<? extends ArchiveRecord> it = warcReader.iterator();
            /* Process all records from this Iterator: */
            log.debug("Starting processing records in WARCfile '" + warcFile.getName() + "'.");
            if (!it.hasNext()) {
                log.debug("No WARCRecords found in WARCfile '" + warcFile.getName() + "'.");
            }
            WARCRecord record = null;
            while (it.hasNext()) {
                log.trace("At begin of processing-loop");
                // Get a record from the file
                record = (WARCRecord) it.next();
                // Process with the job
                try {
                    if (!getFilter().accept(record)) {
                        continue;
                    }
                    log.debug("Processing WARCRecord #" + noOfRecordsProcessed
                            + " in WARCfile '" + warcFile.getName() + "'.");
                    processRecord(record, os);
                    ++noOfRecordsProcessed;
                } catch (NetarkivetException e) {
                    // Our exceptions don't stop us
                    success = false;
                    // With our exceptions, we assume that just the
                    // processing of this record got stopped, and we can
                    // easily find the next
                    handleOurException(e, warcFile, arcFileIndex);
                } catch (Exception e) {
                    success = false; // Strange exceptions do stop us
                    handleException(e, warcFile, arcFileIndex);
                    // With strange exceptions, we don't know
                    // if we've skipped records
                    break;
                }
                // Close the record
                try {
                    // TODO maybe this works, maybe not...
                    long arcRecordOffset = record.getHeader().getContentBegin()
                            + record.getHeader().getLength();
                    record.close();
                    arcFileIndex = arcRecordOffset;
                } catch (IOException ioe) { // Couldn't close a WARCRecord
                    success = false;
                    handleException(ioe, warcFile, arcFileIndex);
                    // If close fails, we don't know if we've skipped records
                    break;
                }
                log.trace("At end of processing-loop");
            }
        } finally {
            try {
                warcReader.close();
            } catch (IOException e) { // Some IOException
                // TODO Discuss whether exceptions on close cause filesFailed addition
                handleException(e, warcFile, arcFileIndex);
            }
        }
    } catch (Exception unexpectedException) {
        handleException(unexpectedException, warcFile, arcFileIndex);
        return false;
    }
    return success;
}
From source file:dk.netarkivet.common.utils.archive.ArchiveBatchJob.java
/**
 * Accepts only arc(.gz) and warc(.gz) files. Runs through all records and calls
 * processRecord() on every record that is allowed by getFilter().
 * Does nothing on a non-(w)arc file.
 *
 * @param archiveFile The arc(.gz) or warc(.gz) file to be processed.
 * @param os the OutputStream to which output is to be written
 * @throws ArgumentNotValid if either argument is null
 * @return true, if the file was processed successfully, otherwise false
 */
public final boolean processFile(File archiveFile, OutputStream os) throws ArgumentNotValid {
    ArgumentNotValid.checkNotNull(archiveFile, "archiveFile");
    ArgumentNotValid.checkNotNull(os, "os");
    Log log = LogFactory.getLog(getClass().getName());
    long arcFileIndex = 0;
    boolean success = true;
    log.info("Processing archive file: " + archiveFile.getName());
    try { // This outer try-catch block catches all unexpected exceptions
        // Create an ArchiveReader and retrieve its Iterator:
        ArchiveReader archiveReader = null;
        try {
            archiveReader = ArchiveReaderFactory.get(archiveFile);
        } catch (IOException e) { // Some IOException
            handleException(e, archiveFile, arcFileIndex);
            return false; // Can't process file after exception
        }
        try {
            Iterator<? extends ArchiveRecord> it = archiveReader.iterator();
            /* Process all records from this Iterator: */
            log.debug("Starting processing records in archive file '" + archiveFile.getName() + "'.");
            if (!it.hasNext()) {
                log.debug("No records found in archive file '" + archiveFile.getName() + "'.");
            }
            ArchiveRecord archiveRecord = null;
            ArchiveRecordBase record;
            while (it.hasNext()) {
                log.trace("At begin of processing-loop");
                // Get a record from the file
                archiveRecord = (ArchiveRecord) it.next();
                record = ArchiveRecordBase.wrapArchiveRecord(archiveRecord);
                // Process with the job
                try {
                    if (!getFilter().accept(record)) {
                        continue;
                    }
                    log.debug("Processing record #" + noOfRecordsProcessed
                            + " in archive file '" + archiveFile.getName() + "'.");
                    processRecord(record, os);
                    ++noOfRecordsProcessed;
                } catch (NetarkivetException e) {
                    // Our exceptions don't stop us
                    success = false;
                    // With our exceptions, we assume that just the
                    // processing of this record got stopped, and we can
                    // easily find the next
                    handleOurException(e, archiveFile, arcFileIndex);
                } catch (Exception e) {
                    success = false; // Strange exceptions do stop us
                    handleException(e, archiveFile, arcFileIndex);
                    // With strange exceptions, we don't know
                    // if we've skipped records
                    break;
                }
                // Close the record
                try {
                    /*
                    // FIXME: Don't know how to compute this for warc-files
                    // computation for arc-files:
                    long arcRecordOffset =
                            record.getBodyOffset() + record.getMetaData().getLength();
                    // computation for warc-files (experimental)
                    long arcRecordOffset = record.getHeader().getOffset();
                    */
                    // TODO maybe this works, maybe not...
                    long arcRecordOffset = archiveRecord.getHeader().getContentBegin()
                            + archiveRecord.getHeader().getLength();
                    archiveRecord.close();
                    arcFileIndex = arcRecordOffset;
                } catch (IOException ioe) { // Couldn't close the record
                    success = false;
                    handleException(ioe, archiveFile, arcFileIndex);
                    // If close fails, we don't know if we've skipped records
                    break;
                }
                log.trace("At end of processing-loop");
            }
        } finally {
            try {
                archiveReader.close();
            } catch (IOException e) { // Some IOException
                // TODO Discuss whether exceptions on close cause filesFailed addition
                handleException(e, archiveFile, arcFileIndex);
            }
        }
    } catch (Exception unexpectedException) {
        handleException(unexpectedException, archiveFile, arcFileIndex);
        return false;
    }
    return success;
}
From source file:de.zib.gndms.logic.model.TaskAction.java
@SuppressWarnings({ "HardcodedFileSeparator" })
protected void trace(final @NotNull String userMsg, final Throwable cause) {
    final Log log1 = getLog();
    final AbstractTask model = getModel();
    final String msg;
    if (model == null)
        msg = userMsg;
    else {
        final TaskState state = model.getState();
        final String descr = model.getDescription();
        msg = "TA of AbstractTask " + model.getId()
                + (state == null ? "" : '/' + state.toString()) + ':'
                + (userMsg.length() > 0 ? ' ' : "") + userMsg
                + (descr == null ? "" : " DESCR: '" + descr + '\'');
    }
    if (cause == null)
        log1.trace(msg);
    else
        log1.trace(msg, cause);
}
From source file:com.icesoft.faces.env.AcegiAuthWrapper.java
public boolean isUserInRole(String role) {
    if (null == authentication) {
        return false;
    }
    // Note: 'Log' is presumably a logger field of the enclosing class (the
    // commons-logging Log interface itself has no static trace method).
    Log.trace("isUserInRole ROLE: " + role);
    GrantedAuthority[] authorities = authentication.getAuthorities();
    if (authentication.getPrincipal() == null || authorities == null) {
        return false;
    }
    for (int i = 0; i < authorities.length; i++) {
        if (role.equals(authorities[i].getAuthority())) {
            return true;
        }
    }
    return false;
}
From source file:org.acmsl.queryj.metadata.engines.AbstractJdbcMetadataManager.java
/**
 * Logs a verbose message.
 * @param message the message to log.
 */
protected void logVerbose(@NotNull final String message) {
    @Nullable
    final Log t_Log = UniqueLogFactory.getLog(AbstractJdbcMetadataManager.class);
    if (t_Log != null) {
        t_Log.trace(message);
    }
}
From source file:org.alfresco.extension.bulkimport.util.LogUtils.java
public final static void trace(final Log log, final String message) {
    log.trace(PREFIX + message);
}
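A short usage sketch of this wrapper (the caller class name is made up; PREFIX is a constant defined elsewhere in LogUtils):

// Hypothetical caller of LogUtils.trace
Log log = LogFactory.getLog(MyBulkImportTool.class); // MyBulkImportTool is a made-up class
if (log.isTraceEnabled()) {
    LogUtils.trace(log, "processed 500 nodes"); // emits PREFIX + "processed 500 nodes"
}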
From source file:org.alfresco.repo.transfer.AbstractManifestProcessorBase.java
/**
 * Logs information about the current <code>childRef</code> and its <code>parentRef</code> at TRACE level.
 * The information includes the 'name', 'fromRepositoryId', 'alien' and 'invadedBy' properties.
 * Additionally, collects the same information for the children of <code>childRef</code>.
 *
 * @param parentRef - {@link NodeRef} instance of the parent of the <code>childRef</code>
 * @param childRef - {@link NodeRef} instance of the child node
 * @param nodeService - {@link NodeService} instance to get properties and check other states
 * @param log - {@link Log} instance to put log for the appropriate class
 */
protected void logInvasionHierarchy(NodeRef parentRef, NodeRef childRef, NodeService nodeService, Log log) {
    Map<QName, Serializable> properties = nodeService.getProperties(childRef);
    Map<QName, Serializable> parentProperties = nodeService.getProperties(parentRef);
    StringBuilder message = new StringBuilder("Information about '")
            .append(properties.get(ContentModel.PROP_NAME))
            .append("' node:\n    fromRepositoryId: ")
            .append(properties.get(TransferModel.PROP_FROM_REPOSITORY_ID)).append("\n")
            .append("    invadedBy: ").append(properties.get(TransferModel.PROP_INVADED_BY)).append("\n")
            .append("    alien: ").append(nodeService.hasAspect(childRef, TransferModel.ASPECT_ALIEN)).append("\n")
            .append("    repositoryId: ").append(properties.get(TransferModel.PROP_REPOSITORY_ID)).append("\n")
            .append("    parent: ").append(parentProperties.get(ContentModel.PROP_NAME))
            .append("(").append(parentProperties.get(TransferModel.PROP_FROM_REPOSITORY_ID)).append(")")
            .append(parentProperties.get(TransferModel.PROP_INVADED_BY)).append(": ")
            .append(nodeService.hasAspect(parentRef, TransferModel.ASPECT_ALIEN)).append("\n")
            .append("    children:\n");
    List<ChildAssociationRef> childAssocs = nodeService.getChildAssocs(childRef);
    if ((null != childAssocs) && !childAssocs.isEmpty()) {
        for (ChildAssociationRef child : childAssocs) {
            properties = nodeService.getProperties(child.getChildRef());
            message.append("        ").append(properties.get(ContentModel.PROP_NAME)).append("(")
                    .append(properties.get(TransferModel.PROP_FROM_REPOSITORY_ID)).append(")")
                    .append(properties.get(TransferModel.PROP_INVADED_BY)).append(": ")
                    .append(nodeService.hasAspect(child.getChildRef(), TransferModel.ASPECT_ALIEN))
                    .append("\n");
        }
    }
    log.trace(message.toString());
}
From source file:org.alfresco.repo.web.util.HttpRangeProcessor.java
/**
 * Stream a range of bytes from the given InputStream to the ServletOutputStream.
 *
 * @param r Byte Range to process
 * @param is InputStream
 * @param os ServletOutputStream
 * @param offset Assumed InputStream position - to calculate skip bytes from
 */
private void streamRangeBytes(final Range r, final InputStream is, final OutputStream os, long offset)
        throws IOException {
    final Log logger = getLogger();
    // Evaluate the TRACE check once, outside the copy loop
    final boolean trace = logger.isTraceEnabled();
    // TODO: investigate using getFileChannel() on ContentReader
    if (r.start != 0L && r.start > offset) {
        long skipped = offset + is.skip(r.start - offset);
        if (skipped < r.start) {
            // Nothing left to download!
            return;
        }
    }
    long span = (r.end - r.start) + 1L;
    long bytesLeft = span;
    int read = 0;
    // Check that bytesLeft isn't greater than int can hold
    int bufSize;
    if (bytesLeft >= Integer.MAX_VALUE - 8) {
        bufSize = CHUNKSIZE;
    } else {
        bufSize = ((int) bytesLeft) < CHUNKSIZE ? (int) bytesLeft : CHUNKSIZE;
    }
    byte[] buf = new byte[bufSize];
    while ((read = is.read(buf)) > 0 && bytesLeft != 0L) {
        os.write(buf, 0, read);
        bytesLeft -= (long) read;
        if (bytesLeft != 0L) {
            int resize;
            if (bytesLeft >= Integer.MAX_VALUE - 8) {
                resize = CHUNKSIZE;
            } else {
                resize = ((int) bytesLeft) < CHUNKSIZE ? (int) bytesLeft : CHUNKSIZE;
            }
            if (resize != buf.length) {
                buf = new byte[resize];
            }
        }
        if (trace)
            logger.trace("...wrote " + read + " bytes, with " + bytesLeft + " to go...");
    }
}
From source file:org.alfresco.scripts.ScriptResourceHelper.java
/**
 * Resolve the import directives in the specified script. The implementation of the supplied
 * ScriptResourceLoader instance is responsible for handling the resource retrieval.
 * <p>
 * Multiple includes of the same resource are dealt with correctly and nested includes of scripts
 * are fully supported.
 * <p>
 * Note that for performance reasons the script import directive syntax and placement in the file
 * is very strict. The import lines <i>must</i> always be first in the file - even before any comments.
 * As soon as the script service detects a non-import line it will assume the rest of the
 * file is executable script and no longer attempt to search for any further import directives. Therefore
 * all imports should be at the top of the script, one following the other, in the correct syntax and
 * with no comments present - the only separator valid between import directives is white space.
 *
 * @param script The script content to resolve imports in
 *
 * @return a valid script with all nested includes resolved into a single script instance
 */
public static String resolveScriptImports(String script, ScriptResourceLoader loader, Log logger) {
    // Use a LinkedHashMap to preserve the order of includes - the key in the collection is used
    // to resolve multiple includes of the same scripts and therefore cyclic includes also
    Map<String, String> scriptlets = new LinkedHashMap<String, String>(8, 1.0f);
    // Perform a recursive resolve of all script imports
    recurseScriptImports(SCRIPT_ROOT, script, loader, scriptlets, logger);
    if (scriptlets.size() == 1) {
        // Quick exit for a single script with no includes
        if (logger.isTraceEnabled())
            logger.trace("Script content resolved to:\r\n" + script);
        return script;
    } else {
        // Calculate the total size of buffer required for the script and all includes
        int length = 0;
        for (String scriptlet : scriptlets.values()) {
            length += scriptlet.length();
        }
        // Append the scripts together to make a single script
        StringBuilder result = new StringBuilder(length);
        for (String scriptlet : scriptlets.values()) {
            result.append(scriptlet);
        }
        if (logger.isTraceEnabled())
            logger.trace("Script content resolved to:\r\n" + result.toString());
        return result.toString();
    }
}
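A brief usage sketch of the method above. Everything caller-side here is an assumption: the <import resource="..."> directive syntax and the single-method shape of ScriptResourceLoader are inferred from Alfresco's script service, and myResolve() is a made-up helper.

// Hypothetical loader: resolves each import to script text (assumed interface shape)
ScriptResourceLoader loader = new ScriptResourceLoader() {
    public String loadScriptResource(String resource) {
        return myResolve(resource); // myResolve() is a made-up helper for this sketch
    }
};
Log logger = LogFactory.getLog(ScriptResourceHelper.class);
String script = "<import resource=\"classpath:alfresco/extension/util.js\">\r\nmain();";
// With TRACE enabled, the fully resolved script is written via logger.trace(...)
String resolved = ScriptResourceHelper.resolveScriptImports(script, loader, logger);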