Usage examples for org.apache.commons.logging.Log#isDebugEnabled()
boolean isDebugEnabled();
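The typical usage pattern guards expensive debug logging behind isDebugEnabled(), so the concatenated message is only built when debug output is actually enabled. A minimal, self-contained sketch (the class name and message are illustrative, not taken from the examples below):

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class GuardedDebugExample {

    // Commons Logging logger for this class.
    private static final Log LOG = LogFactory.getLog(GuardedDebugExample.class);

    public void process(String item) {
        // Guard the call so the message string is only concatenated
        // when debug output is enabled for this logger.
        if (LOG.isDebugEnabled()) {
            LOG.debug("Processing item: " + item);
        }
        // ... actual work ...
    }
}

The longer examples below, collected from real projects, follow the same guard pattern.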
From source file:com.googlecode.jcimd.PacketSerializer.java
private static Packet doDeserializePacket(InputStream inputStream, int maxMessageSize, boolean useChecksum,
        Log logger) throws IOException {
    ByteArrayOutputStream temp = new ByteArrayOutputStream();
    int b;
    while ((b = inputStream.read()) != END_OF_STREAM) {
        // Any data transmitted between packets SHALL be ignored.
        if (b == STX) {
            temp.write(b);
            break;
        }
    }
    if (b != STX) {
        //throw new SoftEndOfStreamException();
        throw new IOException("End of stream reached and still no <STX> byte");
    }
    // Read the input stream until ETX
    while ((b = inputStream.read()) != END_OF_STREAM) {
        temp.write(b);
        if (b == ETX) {
            break;
        }
        if (temp.size() >= maxMessageSize) {
            // Protect from buffer overflow
            throw new IOException(
                    "Buffer overflow reached at " + temp.size() + " byte(s) and still no <ETX> byte");
        }
    }
    if (b != ETX) {
        throw new IOException("End of stream reached and still no <ETX> byte");
    }
    // Parse contents of "temp" (it contains the entire CIMD message
    // including STX and ETX bytes).
    byte[] bytes = temp.toByteArray();
    if (logger.isTraceEnabled()) {
        logger.trace("Received " + bytes.length + " byte(s)");
    }
    if (useChecksum) {
        // Read two (2) bytes, just before the ETX byte.
        StringBuilder buffer = new StringBuilder(2);
        buffer.append((char) bytes[bytes.length - 3]);
        buffer.append((char) bytes[bytes.length - 2]);
        try {
            int checksum = Integer.valueOf(buffer.toString(), 16);
            int expectedChecksum = calculateCheckSum(bytes, 0, bytes.length - 3);
            if (checksum != expectedChecksum) {
                throw new IOException("Checksum error: expecting " + expectedChecksum + " but got " + checksum);
            }
        } catch (NumberFormatException e) {
            throw new IOException("Checksum error: expecting HEX digits, but got " + buffer);
        }
    }
    // Deserialize bytes, minus STX, CC (check sum), and ETX.
    int end = useChecksum ? bytes.length - 3 : bytes.length - 1;
    Packet packet = deserializeFromByteArray(bytes, 1, end);
    if (logger.isDebugEnabled()) {
        logger.debug("Received " + packet);
    }
    return packet;
}
From source file:hotbeans.support.FileSystemHotBeanModuleRepository.java
/**
 * Internal method to update a module.
 */
protected HotBeanModuleInfo updateModuleInternal(String moduleName, final InputStream moduleFileStream,
        final boolean add) {
    long revisionNumber = -1;
    HotBeanModuleInfo hotBeanModuleInfo = null;
    Log logger = this.getLog();
    synchronized (super.getLock()) {
        // If update - module name must be specified
        if (!add && ((moduleName == null) || (moduleName.trim().length() == 0)))
            throw new HotBeansException("Module name not specified!");
        RepositoryFileLock fileLock = null;
        File moduleTempFile = null;
        InputStream moduleTempFileStream = null;
        try {
            // Save module file to temp file
            moduleTempFile = File.createTempFile("hotBeanModule", ".jar");
            FileCopyUtils.copy(moduleFileStream, new FileOutputStream(moduleTempFile));
            // Get name from manifest
            Manifest manifest = ModuleManifestUtils.readManifest(moduleTempFile);
            String jarFileModuleName = ModuleManifestUtils.getName(manifest);
            if (logger.isDebugEnabled())
                logger.debug("Module name in module manifest: '" + jarFileModuleName + "'.");
            // Validate name
            if (add) {
                if ((jarFileModuleName == null) || (jarFileModuleName.trim().length() == 0))
                    throw new InvalidModuleNameException("Module name not specified!");
                else if (super.getHotBeanModule(jarFileModuleName) != null)
                    throw new ModuleAlreadyExistsException("Module name already exists!");
            } else if (!moduleName.equals(jarFileModuleName))
                throw new InvalidModuleNameException(
                        "Module name in jar file doesn't match specified module name!");
            moduleName = jarFileModuleName;
            moduleTempFileStream = new FileInputStream(moduleTempFile);
            if (add && logger.isInfoEnabled())
                logger.info("Adding module '" + moduleName + "'.");
            fileLock = this.obtainRepositoryFileLock(false); // Obtain lock
            File moduleDirectory = new File(this.moduleRepositoryDirectory, moduleName);
            if (!moduleDirectory.exists())
                moduleDirectory.mkdirs();
            // Get next revision number
            revisionNumber = this.getLastRevisionOnFileSystem(moduleName);
            if (logger.isDebugEnabled()) {
                if (add)
                    logger.debug("Adding module - last revision on file system: " + revisionNumber + ".");
                else
                    logger.debug("Updating module - last revision on file system: " + revisionNumber + ".");
            }
            if (revisionNumber < 0)
                revisionNumber = 0;
            File moduleFile = new File(moduleDirectory, revisionNumber + MODULE_FILE_SUFFIX);
            while (moduleFile.exists()) { // This shouldn't really be necessary, but still...
                revisionNumber++;
                moduleFile = new File(moduleDirectory, revisionNumber + MODULE_FILE_SUFFIX);
            }
            if (logger.isDebugEnabled()) {
                if (add)
                    logger.debug("Adding module - revision of new module: " + revisionNumber + ".");
                else
                    logger.debug("Updating module - revision of new module: " + revisionNumber + ".");
            }
            // Save module file
            FileCopyUtils.copy(moduleTempFileStream, new FileOutputStream(moduleFile));
            // Deploy at once
            hotBeanModuleInfo = this.loadModule(moduleName, revisionNumber);
        } catch (Exception e) {
            String moduleNameString = "";
            if (moduleName != null)
                moduleNameString = "'" + moduleName + "' ";
            if (add) {
                logger.error("Error adding module " + moduleNameString + "- " + e, e);
                if (e instanceof HotBeansException)
                    throw (HotBeansException) e;
                else
                    throw new HotBeansException("Error adding module " + moduleNameString + "- " + e, e);
            } else {
                logger.error("Error updating module " + moduleNameString + "- " + e, e);
                if (e instanceof HotBeansException)
                    throw (HotBeansException) e;
                else
                    throw new HotBeansException("Error updating module " + moduleNameString + "- " + e, e);
            }
        } finally {
            this.releaseRepositoryFileLock(fileLock);
            fileLock = null;
            if (moduleTempFileStream != null) {
                // Close stream before deleting the temp file
                try {
                    moduleTempFileStream.close();
                } catch (Exception e) {
                }
            }
            if (moduleTempFile != null)
                FileDeletor.delete(moduleTempFile);
        }
    }
    return hotBeanModuleInfo;
}
From source file:com.dhcc.framework.web.context.DhccContextLoader.java
/**
 * Initialize Spring's web application context for the given servlet context,
 * using the application context provided at construction time, or creating a new one
 * according to the "{@link #CONTEXT_CLASS_PARAM contextClass}" and
 * "{@link #CONFIG_LOCATION_PARAM contextConfigLocation}" context-params.
 * @param servletContext current servlet context
 * @return the new WebApplicationContext
 * @see #ContextLoader(WebApplicationContext)
 * @see #CONTEXT_CLASS_PARAM
 * @see #CONFIG_LOCATION_PARAM
 */
public WebApplicationContext initWebApplicationContext(ServletContext servletContext) {
    if (servletContext.getAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE) != null) {
        throw new IllegalStateException(
                "Cannot initialize context because there is already a root application context present - "
                        + "check whether you have multiple ContextLoader* definitions in your web.xml!");
    }
    Log logger = LogFactory.getLog(DhccContextLoader.class);
    servletContext.log("Initializing Spring root WebApplicationContext");
    if (logger.isInfoEnabled()) {
        logger.info("Root WebApplicationContext: initialization started");
    }
    long startTime = System.currentTimeMillis();
    try {
        // Store context in local instance variable, to guarantee that
        // it is available on ServletContext shutdown.
        if (this.context == null) {
            this.context = createWebApplicationContext(servletContext);
        }
        if (this.context instanceof ConfigurableWebApplicationContext) {
            ConfigurableWebApplicationContext cwac = (ConfigurableWebApplicationContext) this.context;
            if (!cwac.isActive()) {
                // The context has not yet been refreshed -> provide services such as
                // setting the parent context, setting the application context id, etc
                if (cwac.getParent() == null) {
                    // The context instance was injected without an explicit parent ->
                    // determine parent for root web application context, if any.
                    ApplicationContext parent = loadParentContext(servletContext);
                    cwac.setParent(parent);
                }
                configureAndRefreshWebApplicationContext(cwac, servletContext);
            }
        }
        servletContext.setAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE, this.context);
        ClassLoader ccl = Thread.currentThread().getContextClassLoader();
        if (ccl == DhccContextLoader.class.getClassLoader()) {
            currentContext = this.context;
        } else if (ccl != null) {
            currentContextPerThread.put(ccl, this.context);
        }
        if (logger.isDebugEnabled()) {
            logger.debug("Published root WebApplicationContext as ServletContext attribute with name ["
                    + WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE + "]");
        }
        if (logger.isInfoEnabled()) {
            long elapsedTime = System.currentTimeMillis() - startTime;
            logger.info("Root WebApplicationContext: initialization completed in " + elapsedTime + " ms");
        }
        return this.context;
    } catch (RuntimeException ex) {
        logger.error("Context initialization failed", ex);
        servletContext.setAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE, ex);
        throw ex;
    } catch (Error err) {
        logger.error("Context initialization failed", err);
        servletContext.setAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE, err);
        throw err;
    }
}
From source file:com.github.cmisbox.core.Queue.java
public void manageEvent(LocalEvent event) {
    Log log = LogFactory.getLog(this.getClass());
    log.debug("managing: " + event);
    // any platform
    // - a folder can be renamed before containing files are managed: on
    //   folder rename all children must be updated while still in queue;
    // linux
    // - if a file or folder is moved out of a watched folder it is reported
    //   as a rename to null (check if it's still there)
    // mac osx
    // - recursive folder operations (e.g. unzip an archive or move a folder
    //   inside a watched folder) are not reported, only root folder is
    //   reported as create
    // - folder rename causes children to be notified as deleted (with old
    //   path)
    try {
        if (event.isSynch()) {
            this.synchAllWatches();
            return;
        }
        File f = new File(event.getFullFilename());
        if (event.isCreate()) {
            StoredItem item = this.getSingleItem(event.getLocalPath());
            if ((item != null) && (item.getLocalModified().longValue() >= f.lastModified())) {
                return;
            }
            String parent = f.getParent().substring(Config.getInstance().getWatchParent().length());
            CmisObject obj = CMISRepository.getInstance().addChild(this.getSingleItem(parent).getId(), f);
            Storage.getInstance().add(f, obj);
        } else if (event.isDelete()) {
            StoredItem item = this.getSingleItem(event.getLocalPath());
            if (f.exists()) {
                throw new Exception(
                        String.format("File %s reported to be deleted but still exists", f.getAbsolutePath()));
            }
            CMISRepository.getInstance().delete(item.getId());
            Storage.getInstance().delete(item, true);
        } else if (event.isModify()) {
            if (f.isFile()) {
                StoredItem item = this.getSingleItem(event.getLocalPath());
                if (item.getLocalModified().longValue() < f.lastModified()) {
                    Document doc = CMISRepository.getInstance().update(item, f);
                    Storage.getInstance().localUpdate(item, f, doc);
                } else {
                    log.debug("file " + f + " modified in the past");
                }
            }
        } else if (event.isRename()) {
            StoredItem item = this.getSingleItem(event.getLocalPath());
            CmisObject obj = CMISRepository.getInstance().rename(item.getId(), f);
            Storage.getInstance().localUpdate(item, f, obj);
        }
    } catch (Exception e) {
        log.error(e);
        if (log.isDebugEnabled()) {
            e.printStackTrace();
        }
        if (UI.getInstance().isAvailable()) {
            UI.getInstance().notify(e.toString());
            UI.getInstance().setStatus(Status.KO);
        }
    }
}
From source file:net.sourceforge.vulcan.metrics.dom.DomBuilderTest.java
public void testLoadSeleniumSuite() throws Exception {
    Log log = createMock(Log.class);
    expect(log.isDebugEnabled()).andReturn(false);
    DomBuilder.setLog(log);

    replay();

    builder.merge(TestUtils.resolveRelativeFile("source/test/xml/SampleSeleniumReport.html"));

    verify();
}
From source file:net.sourceforge.vulcan.metrics.dom.DomBuilderTest.java
public void testLoadNCoverReport() throws Exception {
    Log log = createMock(Log.class);
    expect(log.isDebugEnabled()).andReturn(false);
    DomBuilder.setLog(log);

    replay();

    builder.merge(TestUtils.resolveRelativeFile("source/test/xml/utf-8-header.xml"));

    verify();
}
From source file:org.acmsl.queryj.api.AbstractTemplateGenerator.java
/**
 * Performs the generation process.
 * @param template the template.
 * @param caching whether template caching is enabled.
 * @param fileName the file name.
 * @param outputDir the output folder.
 * @param rootFolder the root folder.
 * @param charset the {@link Charset} to use.
 * @param fileUtils the {@link FileUtils} instance.
 * @param log the {@link Log} instance.
 * @return whether it gets written to disk.
 * @throws IOException if the template cannot be written to disk.
 * @throws QueryJBuildException if the template cannot be generated.
 */
protected boolean generate(@NotNull final N template, final boolean caching, @NotNull final String fileName,
        @NotNull final File outputDir, @NotNull final File rootFolder, @NotNull final Charset charset,
        @NotNull final FileUtils fileUtils, @Nullable final Log log) throws IOException, QueryJBuildException {
    boolean result = false;

    @Nullable final ST relevantStTemplate = template.generate(true);

    @Nullable final String relevantContent;

    if (relevantStTemplate != null) {
        relevantContent = relevantStTemplate.render();
    } else {
        relevantContent = null;
    }

    if (relevantContent != null) {
        @NotNull final String newHash = computeHash(relevantContent, charset);

        @Nullable final String oldHash = retrieveHash(fileName, outputDir, rootFolder, charset, fileUtils);

        if ((oldHash == null) || (!newHash.equals(oldHash))) {
            result = true;
        }

        if (result) {
            @NotNull final String t_strOutputFile = outputDir.getAbsolutePath() + File.separator + fileName;

            if (caching) {
                serializeTemplate(template, getOutputDir(outputDir, rootFolder).getAbsolutePath()
                        + File.separator + "." + fileName + ".ser");
            }

            @Nullable final ST stTemplate = template.generate(false);

            @Nullable String t_strFileContents = "";

            if (stTemplate != null) {
                try {
                    t_strFileContents = stTemplate.render();
                } catch (@NotNull final Throwable throwable) {
                    @Nullable final Log t_Log = UniqueLogFactory.getLog(AbstractQueryJTemplate.class);

                    if (t_Log != null) {
                        t_Log.error("Error in template " + template.getTemplateName(), throwable);
                    }
                    /*
                    @Nullable final STTreeView debugTool =
                        new StringTemplateTreeView("Debugging " + getTemplateName(), t_Template);
                    debugTool.setVisible(true);
                    while (debugTool.isVisible()) {
                        try {
                            Thread.sleep(1000);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }
                    */
                }
            }

            if (!"".equals(t_strFileContents)) {
                @NotNull final File t_FinalDir = new File(t_strOutputFile).getParentFile();

                final boolean folderCreated = t_FinalDir.mkdirs();

                if ((!folderCreated) && (!outputDir.exists())) {
                    throw new IOException("Cannot create output dir: " + t_FinalDir);
                } else if (t_strFileContents != null) {
                    if ((log != null) && (log.isDebugEnabled())) {
                        log.debug("Writing " + (t_strFileContents.length() * 2) + " bytes (" + charset + "): "
                                + t_strOutputFile);
                    }
                }

                if (t_strFileContents != null) {
                    fileUtils.writeFile(t_strOutputFile, t_strFileContents, charset);
                }

                writeHash(newHash, fileName, outputDir, rootFolder, charset, fileUtils);
            } else {
                if ((log != null) && (log.isDebugEnabled())) {
                    log.debug("Not writing " + t_strOutputFile + " since the generated content is empty");
                }
            }
        }
    }

    return result;
}
From source file:org.acmsl.queryj.api.MetaLanguageUtils.java
/**
 * Sets up the comment parser.
 * @param comment the comment to parse.
 * @return the {@link PerCommentParser} instance.
 */
@SuppressWarnings("unchecked")
protected PerCommentParser setUpParser(@NotNull final String comment) throws RecognitionException {
    @NotNull final PerCommentParser result;

    @Nullable final Log t_Log = UniqueLogFactory.getLog(MetaLanguageUtils.class);

    if ((t_Log != null) && (t_Log.isDebugEnabled())) {
        t_Log.debug("Parsing '" + comment + "'");
    }

    @NotNull final PerCommentLexer t_Lexer = new PerCommentLexer(new ANTLRInputStream(comment));

    @NotNull final CommonTokenStream t_Tokens = new CommonTokenStream(t_Lexer);

    result = new PerCommentParser(t_Tokens);

    @NotNull final PerCommentListener listener = new PerCommentLoggingListener(comment);

    result.addParseListener(listener);

    return result;
}
From source file:org.acmsl.queryj.customsql.handlers.customsqlvalidation.RetrieveQueryHandler.java
/**
 * Retrieves the current {@link Sql query}, and delegates
 * the flow to the given chain.
 * @param command the command.
 * @param chain the chain.
 * @return {@code false} if everything went fine.
 * @throws QueryJBuildException if the operation fails.
 */
protected boolean handle(@NotNull final QueryJCommand command, @NotNull final CustomQueryChain chain)
        throws QueryJBuildException {
    int t_iIndex = retrieveCurrentSqlIndex(command);

    @Nullable final Log t_Log = UniqueLogFactory.getLog(RetrieveQueryHandler.class);

    @NotNull final List<Sql<String>> t_lSql = retrieveSqlList(command);

    final int t_iTotalQueries = t_lSql.size();

    @Nullable final Chronometer t_Chronometer;

    if ((t_Log != null) && (t_Log.isInfoEnabled())) {
        t_Chronometer = new Chronometer();
        t_Log.info("Validating up to " + t_iTotalQueries + " queries. It can take some time.");
    } else {
        t_Chronometer = null;
    }

    while ((t_iIndex > -1) && (t_iIndex < t_lSql.size())) {
        @NotNull final Sql<String> t_Sql = t_lSql.get(t_iIndex);

        setCurrentSql(t_Sql, command);

        if ((t_Log != null) && (t_Log.isDebugEnabled())) {
            t_Log.debug("[" + t_iIndex + "/" + t_iTotalQueries + "] / " + t_Sql.getId());
        }

        setCurrentSqlIndex(t_iIndex++, command);

        chain.process(command);
    }

    if ((t_Log != null) && (t_Chronometer != null)) {
        t_Log.info("Validation took " + t_Chronometer.now());
    }

    return false;
}
From source file:org.acmsl.queryj.metadata.engines.oracle.OracleMetadataManager.java
/**
 * Processes the schema.
 * @param tableNames the table names.
 * @param connection the database connection.
 * @param caseSensitiveness whether the checks are case sensitive or not.
 * @param metadataExtractionListener the metadata extraction listener.
 * @param metaLanguageUtils the {@link MetaLanguageUtils} instance.
 * @param metadataTypeManager the {@link MetadataTypeManager} instance.
 * @return the list of all table names.
 * @throws SQLException if the extraction fails.
 * @throws QueryJException if any other error occurs.
 */
@NotNull
@SuppressWarnings("unused")
protected List<Table<String, Attribute<String>, List<Attribute<String>>>> extractTableMetadata(
        @Nullable final List<String> tableNames, @NotNull final Connection connection,
        final boolean caseSensitiveness, @NotNull final MetadataExtractionListener metadataExtractionListener,
        @NotNull final MetaLanguageUtils metaLanguageUtils,
        @NotNull final MetadataTypeManager metadataTypeManager) throws SQLException, QueryJException {
    @NotNull final List<Table<String, Attribute<String>, List<Attribute<String>>>> result;

    @Nullable final Log t_Log = UniqueLogFactory.getLog(OracleMetadataManager.class);

    @Nullable SQLException sqlExceptionToThrow = null;
    @Nullable ResultSet t_rsResults = null;
    @Nullable PreparedStatement t_PreparedStatement = null;
    @Nullable TableIncompleteValueObject t_Table;

    @NotNull final Map<String, TableIncompleteValueObject> t_mTableMap = new HashMap<>();
    @NotNull final Map<String, List<AttributeIncompleteValueObject>> t_mColumnMap = new HashMap<>();
    @NotNull final Map<String, List<AttributeIncompleteValueObject>> t_mPrimaryKeyMap = new HashMap<>();
    @NotNull final Map<String, List<ForeignKeyIncompleteValueObject>> t_mForeignKeyMap = new HashMap<>();
    @NotNull final Map<String, List<AttributeIncompleteValueObject>> t_mForeignKeyAttributeMap = new HashMap<>();

    try {
        @NotNull final String t_strQuery =
            "select c.table_name, " + "tc.comments table_comment, " + "c.column_name, "
                + "uc.comments column_comment, " + "c.data_type, " + "c.data_length, " + "c.data_precision, "
                + "c.data_scale, " + "c.nullable, " + "c.column_id, " + "cons.position pk_position, "
                + "fks.constraint_name fk_name, " + "fks.target_table, " + "fks.position fk_position "
                + "from user_tab_comments tc, user_col_comments uc, " + "user_tab_columns c "
                + LEFT_OUTER_JOIN + "select ucc.* " + "from user_cons_columns ucc, user_constraints uc "
                + "where uc.constraint_type = 'P' and uc.status = 'ENABLED' "
                + "and uc.constraint_name = ucc.constraint_name) cons "
                + "on c.table_name = cons.table_name and c.column_name = cons.column_name "
                + LEFT_OUTER_JOIN + "select rcon.constraint_name, " + "col.position, "
                + "rcol.table_name source_table, " + "con.table_name target_table, " + "rcol.column_name "
                + "from user_constraints con, " + "user_cons_columns col, " + "user_constraints rcon, "
                + "user_cons_columns rcol " + "where rcon.constraint_type = 'R' "
                + "and rcon.r_constraint_name = con.constraint_name "
                + "and col.table_name = con.table_name " + "and col.constraint_name = con.constraint_name "
                + "and rcol.table_name = rcon.table_name "
                + "and rcol.constraint_name = rcon.constraint_name "
                + "and rcol.position = col.position) fks "
                + "on c.table_name = fks.source_table and c.column_name = fks.column_name "
                + "where instr(tc.table_name, '$') = 0 " + "and tc.table_name = c.table_name "
                + "and tc.table_name = uc.table_name " + "and c.column_name = uc.column_name ";

        if ((t_Log != null) && (t_Log.isDebugEnabled())) {
            t_Log.debug("query:" + t_strQuery);
        }

        t_PreparedStatement = connection.prepareStatement(t_strQuery);
    } catch (@NotNull final SQLException invalidQuery) {
        sqlExceptionToThrow = invalidQuery;
    }

    if (t_PreparedStatement != null) {
        try {
            t_rsResults = t_PreparedStatement.executeQuery();
        } catch (@NotNull final SQLException queryFailed) {
            sqlExceptionToThrow = queryFailed;
        }
    }

    if (t_rsResults != null) {
        try {
            while (t_rsResults.next()) {
                processRow(t_rsResults, t_mTableMap, t_mColumnMap, t_mPrimaryKeyMap, t_mForeignKeyMap,
                        t_mForeignKeyAttributeMap, caseSensitiveness, metaLanguageUtils, metadataTypeManager);
            }
        } catch (@NotNull final SQLException errorIteratingResults) {
            sqlExceptionToThrow = errorIteratingResults;
        }
    }

    if (t_rsResults != null) {
        try {
            t_rsResults.close();
        } catch (@NotNull final SQLException sqlException) {
            if (t_Log != null) {
                t_Log.error("Cannot close the result set.", sqlException);
            }
        }
    }

    if (t_PreparedStatement != null) {
        try {
            t_PreparedStatement.close();
        } catch (@NotNull final SQLException sqlException) {
            if (t_Log != null) {
                t_Log.error("Cannot close the statement.", sqlException);
            }
        }
    }

    if (sqlExceptionToThrow != null) {
        throw sqlExceptionToThrow;
    }

    buildUpTables(t_mTableMap, t_mColumnMap, t_mPrimaryKeyMap, t_mForeignKeyMap, t_mForeignKeyAttributeMap,
            caseSensitiveness, metaLanguageUtils);

    result = cloneTables(t_mTableMap.values());

    return result;
}