List of usage examples for org.apache.commons.logging Log.debug
void debug(Object message);
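Log.debug logs a message with debug log level. As a minimal sketch of typical usage (the class name MyService and the message are illustrative), callers usually guard the call with isDebugEnabled() so the message String is only constructed when debug output is active:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class MyService {

    // One logger per class, obtained from LogFactory.
    private static final Log LOG = LogFactory.getLog(MyService.class);

    public void process(String item) {
        // Guard the call so the argument String is only built
        // when debug logging is actually enabled.
        if (LOG.isDebugEnabled()) {
            LOG.debug("Processing item: " + item);
        }
    }
}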
From source file:com.swordlord.gozer.datatypeformat.DataTypeHelper.java
/**
 * Returns typedValue converted to an instance compatible with
 * untypedValueClass.
 *
 * @param untypedValueClass the target class
 * @param typedValue the value to convert
 * @return the converted value
 */
public static Object fromDataType(Class<?> untypedValueClass, Object typedValue) {
    Log LOG = LogFactory.getLog(DataTypeHelper.class);

    if (typedValue == null) {
        return null;
    }
    if (untypedValueClass == null) {
        return typedValue;
    }
    if (ClassUtils.isAssignable(typedValue.getClass(), untypedValueClass)) {
        return typedValue;
    }

    String strTypedValue = null;
    boolean isStringTypedValue = typedValue instanceof String;
    Number numTypedValue = null;
    boolean isNumberTypedValue = typedValue instanceof Number;
    Boolean boolTypedValue = null;
    boolean isBooleanTypedValue = typedValue instanceof Boolean;
    Date dateTypedValue = null;
    boolean isDateTypedValue = typedValue instanceof Date;

    if (isStringTypedValue) {
        strTypedValue = (String) typedValue;
    }
    if (isNumberTypedValue) {
        numTypedValue = (Number) typedValue;
    }
    if (isBooleanTypedValue) {
        boolTypedValue = (Boolean) typedValue;
    }
    if (isDateTypedValue) {
        dateTypedValue = (Date) typedValue;
    }

    Object v = null;
    if (String.class.equals(untypedValueClass)) {
        v = ObjectUtils.toString(typedValue);
    } else if (BigDecimal.class.equals(untypedValueClass)) {
        if (isStringTypedValue) {
            v = NumberUtils.createBigDecimal(strTypedValue);
        } else if (isNumberTypedValue) {
            v = new BigDecimal(numTypedValue.doubleValue());
        } else if (isBooleanTypedValue) {
            v = new BigDecimal(BooleanUtils.toInteger(boolTypedValue.booleanValue()));
        } else if (isDateTypedValue) {
            v = new BigDecimal(dateTypedValue.getTime());
        }
    } else if (Boolean.class.equals(untypedValueClass)) {
        if (isStringTypedValue) {
            v = BooleanUtils.toBooleanObject(strTypedValue);
        } else if (isNumberTypedValue) {
            v = BooleanUtils.toBooleanObject(numTypedValue.intValue());
        } else if (isDateTypedValue) {
            v = BooleanUtils.toBooleanObject((int) dateTypedValue.getTime());
        }
    } else if (Byte.class.equals(untypedValueClass)) {
        if (isStringTypedValue) {
            v = Byte.valueOf(strTypedValue);
        } else if (isNumberTypedValue) {
            v = new Byte(numTypedValue.byteValue());
        } else if (isBooleanTypedValue) {
            v = new Byte((byte) BooleanUtils.toInteger(boolTypedValue.booleanValue()));
        } else if (isDateTypedValue) {
            v = new Byte((byte) dateTypedValue.getTime());
        }
    } else if (byte[].class.equals(untypedValueClass)) {
        if (isStringTypedValue) {
            v = strTypedValue.getBytes();
        }
    } else if (Double.class.equals(untypedValueClass)) {
        if (isStringTypedValue) {
            v = NumberUtils.createDouble(strTypedValue);
        } else if (isNumberTypedValue) {
            v = new Double(numTypedValue.doubleValue());
        } else if (isBooleanTypedValue) {
            v = new Double(BooleanUtils.toInteger(boolTypedValue.booleanValue()));
        } else if (isDateTypedValue) {
            v = new Double(dateTypedValue.getTime());
        }
    } else if (Float.class.equals(untypedValueClass)) {
        if (isStringTypedValue) {
            v = NumberUtils.createFloat(strTypedValue);
        } else if (isNumberTypedValue) {
            v = new Float(numTypedValue.floatValue());
        } else if (isBooleanTypedValue) {
            v = new Float(BooleanUtils.toInteger(boolTypedValue.booleanValue()));
        } else if (isDateTypedValue) {
            v = new Float(dateTypedValue.getTime());
        }
    } else if (Short.class.equals(untypedValueClass)) {
        // NOTE: this branch produces Integer values, not Short;
        // this looks like a copy/paste bug preserved from the source.
        if (isStringTypedValue) {
            v = NumberUtils.createInteger(strTypedValue);
        } else if (isNumberTypedValue) {
            v = new Integer(numTypedValue.intValue());
        } else if (isBooleanTypedValue) {
            v = BooleanUtils.toIntegerObject(boolTypedValue.booleanValue());
        } else if (isDateTypedValue) {
            v = new Integer((int) dateTypedValue.getTime());
        }
    } else if (Integer.class.equals(untypedValueClass)) {
        if (isStringTypedValue) {
            v = NumberUtils.createInteger(strTypedValue);
        } else if (isNumberTypedValue) {
            v = new Integer(numTypedValue.intValue());
        } else if (isBooleanTypedValue) {
            v = BooleanUtils.toIntegerObject(boolTypedValue.booleanValue());
        } else if (isDateTypedValue) {
            v = new Integer((int) dateTypedValue.getTime());
        }
    } else if (Long.class.equals(untypedValueClass)) {
        if (isStringTypedValue) {
            v = NumberUtils.createLong(strTypedValue);
        } else if (isNumberTypedValue) {
            v = new Long(numTypedValue.longValue());
        } else if (isBooleanTypedValue) {
            v = new Long(BooleanUtils.toInteger(boolTypedValue.booleanValue()));
        } else if (isDateTypedValue) {
            v = new Long(dateTypedValue.getTime());
        }
    } else if (java.sql.Date.class.equals(untypedValueClass)) {
        if (isNumberTypedValue) {
            v = new java.sql.Date(numTypedValue.longValue());
        } else if (isDateTypedValue) {
            v = new java.sql.Date(dateTypedValue.getTime());
        }
    } else if (java.sql.Time.class.equals(untypedValueClass)) {
        if (isNumberTypedValue) {
            v = new java.sql.Time(numTypedValue.longValue());
        } else if (isDateTypedValue) {
            v = new java.sql.Time(dateTypedValue.getTime());
        }
    } else if (java.sql.Timestamp.class.equals(untypedValueClass)) {
        if (isNumberTypedValue) {
            v = new java.sql.Timestamp(numTypedValue.longValue());
        } else if (isDateTypedValue) {
            v = new java.sql.Timestamp(dateTypedValue.getTime());
        }
    } else if (Date.class.equals(untypedValueClass)) {
        if (isNumberTypedValue) {
            v = new Date(numTypedValue.longValue());
        } else if (isStringTypedValue) {
            try {
                v = DateFormat.getDateInstance().parse(strTypedValue);
            } catch (ParseException e) {
                LOG.error("Unable to parse the date : " + strTypedValue);
                LOG.debug(e.getMessage());
            }
        }
    }
    return v;
}
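A hypothetical call site for the helper above (the argument values are illustrative): on an unparsable date string, the catch block reports the failure via LOG.error and the exception detail via LOG.debug, and the method returns null.

// Converts successfully; yields the Integer 42.
Object converted = DataTypeHelper.fromDataType(Integer.class, "42");

// DateFormat.getDateInstance().parse(...) throws ParseException here,
// so the error/debug log statements fire and null is returned.
Object failed = DataTypeHelper.fromDataType(java.util.Date.class, "not-a-date");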
From source file:ome.logic.AdminImpl.java
@RolesAllowed("system") @Transactional(readOnly = false)/*from www . j ava 2 s . co m*/ public void synchronizeLoginCache() { final Log log = getBeanHelper().getLogger(); final List<Map<String, Object>> dnIds = ldapUtil.lookupLdapAuthExperimenters(); if (dnIds.size() > 0) { log.info("Synchronizing " + dnIds.size() + " ldap user(s)"); } for (Map<String, Object> dnId : dnIds) { String dn = (String) dnId.get("dn"); Long id = (Long) dnId.get("experimenter_id"); try { Experimenter e = userProxy(id); ldapUtil.synchronizeLdapUser(e.getOmeName()); } catch (ApiUsageException aue) { // User likely doesn't exist log.debug("User not found: " + dn); } catch (Exception e) { log.error("synchronizeLdapUser:" + dnId, e); } } context.publishEvent(new UserGroupUpdateEvent(this)); }
From source file:ome.logic.LdapImpl.java
/** * The ids in "minus" will be removed from the ids in "base" and then * the operation chosen by "add" will be run on them. This method * ignores all methods known by Roles.//from ww w . ja v a 2 s . co m * * @param e * @param base * @param minus * @param add */ private void modifyGroups(Experimenter e, Collection<Long> base, Collection<Long> minus, boolean add) { final Log log = getBeanHelper().getLogger(); Set<Long> ids = new HashSet<Long>(base); ids.removeAll(minus); // Take no actions on system/user group. ids.remove(roles.getSystemGroupId()); ids.remove(roles.getUserGroupId()); if (ids.size() > 0) { log.info(String.format("%s groups for %s: %s", add ? "Adding" : "Removing", e.getOmeName(), ids)); Set<ExperimenterGroup> grps = new HashSet<ExperimenterGroup>(); for (Long id : ids) { grps.add(new ExperimenterGroup(id, false)); } if (add) { provider.addGroups(e, grps.toArray(new ExperimenterGroup[0])); } else { provider.removeGroups(e, grps.toArray(new ExperimenterGroup[0])); } if (add) { // If we have just added groups, then it's possible that // the "user" groupis at the front of the list, in which // case we should assign another specific group. e = iQuery.get(Experimenter.class, e.getId()); log.debug("sizeOfGroupExperimenterMap=" + e.sizeOfGroupExperimenterMap()); if (e.sizeOfGroupExperimenterMap() > 1) { GroupExperimenterMap primary = e.getGroupExperimenterMap(0); GroupExperimenterMap next = e.getGroupExperimenterMap(1); log.debug("primary=" + primary.parent().getId()); log.debug("next=" + next.parent().getId()); if (primary.parent().getId().equals(roles.getUserGroupId())) { log.debug("calling setDefaultGroup"); provider.setDefaultGroup(e, next.parent()); } } } } }
From source file:ome.util.ReflectionUtils.java
public static void findFieldsOfClass(Class target, Object o, String path, Log log, Set done) {
    if (null == path || path.equals("")) {
        path = "\nthis";
    }
    if (null == done) {
        done = new HashSet();
    }
    if (done.contains(o)) {
        return;
    }
    done.add(o);

    if (target.isInstance(o)) {
        log.info(path + ";" + "\n----------------------\n" + o.toString() + " < " + o.getClass());
    } else if (o instanceof Set) {
        for (Iterator it = ((Set) o).iterator(); it.hasNext();) {
            Object element = it.next();
            findFieldsOfClass(target, element, path, log, done);
        }
    } else {
        Method[] accessors = getGettersAndSetters(o);
        log.debug(accessors);
        for (int i = 0; i < accessors.length; i++) {
            Method method = accessors[i];
            if (method.getName().startsWith("get")) {
                log.debug("Trying " + method);
                Object obj = invokeGetter(o, method);
                if (null != obj) {
                    findFieldsOfClass(target, obj, path + ".\n" + method.getName() + "()", log, done);
                }
            }
        }
    }
}
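A hedged usage sketch for the traversal above (Dataset and Image are hypothetical model classes, not part of the shown source): passing a commons-logging Log lets the method report each accessor it tries at debug level.

Log log = LogFactory.getLog(ReflectionUtils.class);

// Walk the object graph rooted at 'dataset', logging the path to every
// reachable Image instance; null path/done fall back to the defaults.
ReflectionUtils.findFieldsOfClass(Image.class, dataset, null, log, null);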
From source file:oracle.kv.hadoop.hive.table.TableFieldTypeEnum.java
public static boolean kvHiveTypesMatch(FieldDef kvFieldDef, TypeInfo hiveColumnType) {
    final Log LOG = LogFactory.getLog(TableFieldTypeEnum.class.getName());

    /* Compare top-level types. */
    final FieldDef.Type kvFieldType = kvFieldDef.getType();
    if (!fromKvType(kvFieldType).equals(fromHiveType(hiveColumnType, kvFieldType))) {
        /* Special case: KV type ENUM & Hive type STRING handled below. */
        if (!(TABLE_FIELD_ENUM.equals(fromKvType(kvFieldType))
              && TABLE_FIELD_STRING.equals(fromHiveType(hiveColumnType)))) {
            LOG.error("Field type MISMATCH: " + fromKvType(kvFieldType) + " != "
                      + fromHiveType(hiveColumnType, kvFieldType));
            return false;
        }
    }

    /* If top-level types are primitive and match, then it's a match. */
    if (isPrimitive(kvFieldType) && isPrimitive(hiveColumnType)) {
        return true;
    }

    /* Top-level types match, but neither are primitive; deep dive. */
    switch (kvFieldType) {

    /* If kvType is ENUM, then Hive type must be STRING. */
    case ENUM:
        if (!TABLE_FIELD_STRING.equals(fromHiveType(hiveColumnType))) {
            LOG.error("Field type MISMATCH: for KV ENUM field type, "
                      + "expected Hive STRING column type, but Hive "
                      + "column type is " + hiveColumnType.getTypeName());
            return false;
        }
        return true;

    /*
     * If kvType is ARRAY, then Hive type must be LIST, and must have
     * matching element type.
     */
    case ARRAY:
        if (!Category.LIST.equals(hiveColumnType.getCategory())) {
            LOG.error("Field type MISMATCH: for KV ARRAY field "
                      + "type, expected Hive LIST column type, but "
                      + "Hive column type is " + hiveColumnType.getCategory());
            return false;
        }
        final TypeInfo hiveElementType = ((ListTypeInfo) hiveColumnType).getListElementTypeInfo();
        final ArrayValue kvArrayValue = kvFieldDef.createArray();
        final ArrayDef kvArrayDef = kvArrayValue.getDefinition();
        final FieldDef kvElementDef = kvArrayDef.getElement();
        LOG.debug("KV ARRAY field type and Hive LIST column type: "
                  + "comparing KV ARRAY element type [" + kvElementDef.getType()
                  + "] with Hive LIST element type [" + hiveElementType + "]");
        return kvHiveTypesMatch(kvElementDef, hiveElementType);

    /*
     * If kvType is MAP, then Hive type must be MAP<STRING, type>, and
     * must have matching value types.
     */
    case MAP:
        if (!Category.MAP.equals(hiveColumnType.getCategory())) {
            LOG.error("Field type MISMATCH: for KV MAP field type, "
                      + "expected Hive MAP column type, but Hive "
                      + "column type is " + hiveColumnType.getCategory());
            return false;
        }
        final TypeInfo hiveMapKeyType = ((MapTypeInfo) hiveColumnType).getMapKeyTypeInfo();
        final TypeInfo hiveMapValType = ((MapTypeInfo) hiveColumnType).getMapValueTypeInfo();

        /* Hive key type must be STRING. */
        if (!TABLE_FIELD_STRING.equals(fromHiveType(hiveMapKeyType))) {
            LOG.error("Field type MISMATCH: for KV MAP field type "
                      + "and Hive MAP column type, expected STRING "
                      + "key type, but Hive MAP column's key type is "
                      + fromHiveType(hiveMapKeyType));
            return false;
        }

        /* Hive value type must match kv value type. */
        final MapValue kvMapValue = kvFieldDef.createMap();
        final MapDef kvMapDef = kvMapValue.getDefinition();
        final FieldDef kvMapValueDef = kvMapDef.getElement();
        LOG.debug("KV MAP field type and Hive MAP column type: "
                  + "comparing KV MAP value type [" + kvMapValueDef.getType()
                  + "] with Hive MAP value type [" + hiveMapValType + "]");
        return kvHiveTypesMatch(kvMapValueDef, hiveMapValType);

    /*
     * If kvType is RECORD, then Hive type must be STRUCT, and must
     * have same element types.
     */
    case RECORD:
        if (!Category.STRUCT.equals(hiveColumnType.getCategory())) {
            LOG.error("Field type MISMATCH: for KV RECORD field "
                      + "type, expected Hive STRUCT column type, but "
                      + "Hive column type is " + hiveColumnType.getCategory());
            return false;
        }

        /*
         * Hive STRUCT field names and corresponding field types must
         * match KV RECORD field names.
         *
         * -- NOTE --
         *
         * KV field names (and table and index names), as well as the
         * names of the elements of a RECORD field, are case
         * INSENSITIVE, but case PRESERVING. For example, if a KV table
         * is created with a RECORD field having two elements named
         * "MY_ELEMENT_1" and "MY_ELEMENT_2", those elements can be
         * referenced using the Strings "my_element_1" and
         * "my_element_2" (or "mY_eLEment_1" and "MY_element_2", etc.).
         * But when the element names are retrieved (via
         * RecordDef.getFields() for example), the names returned for
         * the desired elements will always be in the case used when
         * the RECORD was originally created; that is, "MY_ELEMENT_1"
         * and "MY_ELEMENT_2". Compare this with how Hive handles case
         * in its STRUCT data type.
         *
         * Recall that the Hive SerDeParameters.getColumnNames() method
         * returns the names of a Hive table's columns (which
         * correspond to a KV table's top-level fields) all in LOWER
         * CASE. Unfortunately, Hive seems to handle case for the names
         * of the elements of a Hive STRUCT differently. When the names
         * of the elements of a Hive STRUCT are retrieved using the
         * Hive StructTypeInfo.getAllStructFieldNames() method, the
         * case of those names appears to be PRESERVED, rather than
         * changed to all lower case, as is done for the column names.
         * That is, if the element names of a Hive STRUCT are, for
         * example, "mY_eLEment_1" and "MY_element_2", then
         * StructTypeInfo.getAllStructFieldNames() will return
         * "mY_eLEment_1" and "MY_element_2"; rather than
         * "my_element_1" and "my_element_2" (or "MY_ELEMENT_1" and
         * "MY_ELEMENT_2"). As a result, when validating the element
         * names of the KV RECORD with the corresponding Hive STRUCT
         * element names below, the retrieved element names for both
         * the KV RECORD and the Hive STRUCT are all converted to lower
         * case before performing the comparison.
         */
        final List<String> hiveRecFieldNames = ((StructTypeInfo) hiveColumnType).getAllStructFieldNames();
        final RecordValue kvRecValue = kvFieldDef.createRecord();
        final RecordDef kvRecDef = kvRecValue.getDefinition();
        final List<String> kvRecFieldNames = kvRecDef.getFieldNames();

        /* Validate number of RECORD elements & STRUCT elements. */
        if (hiveRecFieldNames.size() != kvRecFieldNames.size()) {
            LOG.error("Field type MISMATCH: for KV RECORD field "
                      + "type and Hive STRUCT column type, number of "
                      + "KV RECORD elements [" + kvRecFieldNames.size()
                      + "] != number of Hive STRUCT elements ["
                      + hiveRecFieldNames.size() + "]. "
                      + "\nKV RECORD element names = " + kvRecFieldNames
                      + "\nHive STRUCT element names = " + hiveRecFieldNames);
            return false;
        }

        /* Validate RECORD & STRUCT element NAMES (use lower case). */
        final List<String> hiveNamesLower = new ArrayList<String>();
        for (String name : hiveRecFieldNames) {
            hiveNamesLower.add(name.toLowerCase());
        }
        for (String kvRecFieldName : kvRecFieldNames) {
            /* Validate the current KV and Hive Record field names. */
            final String kvFieldLower = kvRecFieldName.toLowerCase();
            if (!hiveNamesLower.contains(kvFieldLower)) {
                LOG.error("Field type MISMATCH: for KV RECORD field "
                          + "type and Hive STRUCT column type, "
                          + "KV RECORD element name [" + kvFieldLower
                          + "] does NOT MATCH any Hive STRUCT element "
                          + "names " + hiveNamesLower);
                return false;
            }

            /*
             * The current KV element name matches one of the element
             * names in the Hive STRUCT. Get the corresponding KV and
             * Hive data types and compare them.
             *
             * Note that the method
             * StructTypeInfo.getStructFieldTypeInfo(<fieldname>)
             * appears to have a bug (see below). Therefore, a local
             * version of that method is defined in this class and used
             * here.
             *
             * Note also that because FieldDef.getField(<fieldname>) is
             * NOT case sensitive, the corresponding values of
             * hiveRecFieldNames can be used when retrieving the
             * element types of the KV RECORD.
             */
            final TypeInfo hiveRecFieldType =
                getStructFieldTypeInfo(kvRecFieldName, (StructTypeInfo) hiveColumnType);
            final FieldDef kvRecFieldDef = kvRecDef.getFieldDef(kvRecFieldName);
            if (!kvHiveTypesMatch(kvRecFieldDef, hiveRecFieldType)) {
                LOG.error("Field type MISMATCH: for KV RECORD field "
                          + "type and Hive STRUCT column type, "
                          + "KV RECORD element type [" + kvRecFieldDef.getType()
                          + "] does NOT MATCH the corresponding Hive STRUCT "
                          + "element type [" + hiveRecFieldType + "]");
                return false;
            }
        }
        return true;

    default:
        LOG.error("Field type MISMATCH: UNKNOWN KV field type "
                  + "[" + kvFieldType + "]");
        return false;
    }
}
From source file:org.acmsl.queryj.AbstractQueryJChain.java
/**
 * Sends given command to a concrete chain.
 * @param chain the concrete chain.
 * @param command the command that represents which actions should be done.
 * @return <code>true</code> if the command is processed by the chain.
 * @throws QueryJBuildException if the process fails.
 */
protected boolean process(@NotNull final Chain<C, QueryJBuildException, CH> chain, @NotNull final C command)
    throws QueryJBuildException {
    boolean result = false;

    @Nullable final Log t_Log = command.getLog();
    final boolean t_bLoggingEnabled = (t_Log != null);

    boolean restart = false;
    do {
        try {
            @Nullable
            CH t_CurrentCommandHandler = null;
            do {
                t_CurrentCommandHandler = getNextChainLink(chain, t_CurrentCommandHandler);
                if (t_bLoggingEnabled) {
                    t_Log.debug("Next handler: " + t_CurrentCommandHandler);
                }
                if (t_CurrentCommandHandler != null) {
                    result = t_CurrentCommandHandler.handle(command);
                    if (t_bLoggingEnabled) {
                        t_Log.debug(t_CurrentCommandHandler + "#handle(QueryJCommand) returned " + result);
                    }
                }
            } while ((!result) && (t_CurrentCommandHandler != null) && (!restart));
        } catch (@NotNull final DevelopmentModeException devMode) {
            restart = true;
        } catch (@NotNull final QueryJBuildException buildException) {
            cleanUpOnError(buildException, command);
            if (t_bLoggingEnabled) {
                t_Log.error("QueryJ could not generate sources correctly.", buildException);
            }
            throw buildException;
        }
    } while (restart);

    return result;
}
From source file:org.acmsl.queryj.api.AbstractTemplateGenerator.java
/**
 * Performs the generation process.
 * @param template the template.
 * @param caching whether template caching is enabled.
 * @param fileName the file name.
 * @param outputDir the output folder.
 * @param rootFolder the root folder.
 * @param charset the {@link Charset} to use.
 * @param fileUtils the {@link FileUtils} instance.
 * @param log the {@link Log} instance.
 * @return whether it gets written to disk.
 * @throws IOException if the template cannot be written to disk.
 * @throws QueryJBuildException if the template cannot be generated.
 */
protected boolean generate(@NotNull final N template, final boolean caching, @NotNull final String fileName,
    @NotNull final File outputDir, @NotNull final File rootFolder, @NotNull final Charset charset,
    @NotNull final FileUtils fileUtils, @Nullable final Log log)
    throws IOException, QueryJBuildException {
    boolean result = false;

    @Nullable final ST relevantStTemplate = template.generate(true);
    @Nullable final String relevantContent;
    if (relevantStTemplate != null) {
        relevantContent = relevantStTemplate.render();
    } else {
        relevantContent = null;
    }

    if (relevantContent != null) {
        @NotNull final String newHash = computeHash(relevantContent, charset);
        @Nullable final String oldHash = retrieveHash(fileName, outputDir, rootFolder, charset, fileUtils);

        if ((oldHash == null) || (!newHash.equals(oldHash))) {
            result = true;
        }

        if (result) {
            @NotNull final String t_strOutputFile = outputDir.getAbsolutePath() + File.separator + fileName;

            if (caching) {
                serializeTemplate(template, getOutputDir(outputDir, rootFolder).getAbsolutePath()
                    + File.separator + "." + fileName + ".ser");
            }

            @Nullable final ST stTemplate = template.generate(false);
            @Nullable String t_strFileContents = "";

            if (stTemplate != null) {
                try {
                    t_strFileContents = stTemplate.render();
                } catch (@NotNull final Throwable throwable) {
                    @Nullable final Log t_Log = UniqueLogFactory.getLog(AbstractQueryJTemplate.class);
                    if (t_Log != null) {
                        t_Log.error("Error in template " + template.getTemplateName(), throwable);
                    }
                    /*
                    @Nullable final STTreeView debugTool =
                        new StringTemplateTreeView("Debugging " + getTemplateName(), t_Template);
                    debugTool.setVisible(true);
                    while (debugTool.isVisible()) {
                        try {
                            Thread.sleep(1000);
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }
                    */
                }
            }

            if (!"".equals(t_strFileContents)) {
                @NotNull final File t_FinalDir = new File(t_strOutputFile).getParentFile();
                final boolean folderCreated = t_FinalDir.mkdirs();

                if ((!folderCreated) && (!outputDir.exists())) {
                    throw new IOException("Cannot create output dir: " + t_FinalDir);
                } else if (t_strFileContents != null) {
                    if ((log != null) && (log.isDebugEnabled())) {
                        log.debug("Writing " + (t_strFileContents.length() * 2) + " bytes (" + charset
                            + "): " + t_strOutputFile);
                    }
                }

                if (t_strFileContents != null) {
                    fileUtils.writeFile(t_strOutputFile, t_strFileContents, charset);
                }

                writeHash(newHash, fileName, outputDir, rootFolder, charset, fileUtils);
            } else {
                if ((log != null) && (log.isDebugEnabled())) {
                    log.debug("Not writing " + t_strOutputFile + " since the generated content is empty");
                }
            }
        }
    }
    return result;
}
From source file:org.acmsl.queryj.api.AbstractTemplateGeneratorThread.java
/**
 * Runs the template generation process.
 * @param templateGenerator the template generator.
 * @param template the template.
 * @param outputDir the output folder.
 * @param rootFolder the root folder.
 * @param charset the {@link Charset} to use.
 * @param threadIndex the thread index.
 * @param barrier the cyclic barrier.
 * @param log the {@link Log} instance.
 */
@ThreadSafe
protected void runGenerator(@NotNull final TG templateGenerator, @NotNull final T template,
    @NotNull final File outputDir, @NotNull final File rootFolder, @NotNull final Charset charset,
    final int threadIndex, @Nullable final CyclicBarrier barrier, @Nullable final Log log) {
    boolean generated = false;

    try {
        generated = templateGenerator.write(template, outputDir, rootFolder, charset);
    } catch (@NotNull final QueryJBuildException unknownException) {
        if (log != null) {
            log.warn(unknownException);
        }
    } catch (@NotNull final IOException ioException) {
        if (log != null) {
            log.warn(ioException);
        }
    }

    if (generated) {
        if (log != null) {
            log.debug(buildSuccessLogMessage(template, threadIndex));
        }
    }

    if (barrier != null) {
        try {
            barrier.await();
        } catch (@NotNull final InterruptedException interrupted) {
            if (log != null) {
                log.debug("Interrupted thread", interrupted);
            }
            Thread.currentThread().interrupt();
        } catch (@NotNull final BrokenBarrierException brokenBarrier) {
            if (log != null) {
                log.warn(AbstractTemplateWritingHandler.BROKEN_BARRIER_LITERAL, brokenBarrier);
            }
        }
    }
}
From source file:org.acmsl.queryj.api.MetaLanguageUtils.java
/**
 * Sets up the comment parser.
 * @param comment the comment to parse.
 * @return the {@link PerCommentParser} instance.
 * @throws RecognitionException if the comment cannot be parsed.
 */
@SuppressWarnings("unchecked")
protected PerCommentParser setUpParser(@NotNull final String comment) throws RecognitionException {
    @NotNull final PerCommentParser result;

    @Nullable final Log t_Log = UniqueLogFactory.getLog(MetaLanguageUtils.class);
    if ((t_Log != null) && (t_Log.isDebugEnabled())) {
        t_Log.debug("Parsing '" + comment + "'");
    }

    @NotNull final PerCommentLexer t_Lexer = new PerCommentLexer(new ANTLRInputStream(comment));
    @NotNull final CommonTokenStream t_Tokens = new CommonTokenStream(t_Lexer);
    result = new PerCommentParser(t_Tokens);

    @NotNull final PerCommentListener listener = new PerCommentLoggingListener(comment);
    result.addParseListener(listener);

    return result;
}
From source file:org.acmsl.queryj.customsql.handlers.customsqlvalidation.RetrieveQueryHandler.java
/**
 * Retrieves the current {@link Sql query}, and delegates
 * the flow to given chain.
 * @param command the command.
 * @param chain the chain.
 * @return {@code false} if everything went fine.
 * @throws QueryJBuildException if the operation fails.
 */
protected boolean handle(@NotNull final QueryJCommand command, @NotNull final CustomQueryChain chain)
    throws QueryJBuildException {
    int t_iIndex = retrieveCurrentSqlIndex(command);

    @Nullable final Log t_Log = UniqueLogFactory.getLog(RetrieveQueryHandler.class);

    @NotNull final List<Sql<String>> t_lSql = retrieveSqlList(command);
    final int t_iTotalQueries = t_lSql.size();

    @Nullable final Chronometer t_Chronometer;
    if ((t_Log != null) && (t_Log.isInfoEnabled())) {
        t_Chronometer = new Chronometer();
        t_Log.info("Validating up to " + t_iTotalQueries + " queries. It can take some time.");
    } else {
        t_Chronometer = null;
    }

    while ((t_iIndex > -1) && (t_iIndex < t_lSql.size())) {
        @NotNull final Sql<String> t_Sql = t_lSql.get(t_iIndex);
        setCurrentSql(t_Sql, command);
        if ((t_Log != null) && (t_Log.isDebugEnabled())) {
            t_Log.debug("[" + t_iIndex + "/" + t_iTotalQueries + "] / " + t_Sql.getId());
        }
        setCurrentSqlIndex(t_iIndex++, command);
        chain.process(command);
    }

    if ((t_Log != null) && (t_Chronometer != null)) {
        t_Log.info("Validation took " + t_Chronometer.now());
    }

    return false;
}