Example usage for java.util.logging Logger isLoggable

List of usage examples for java.util.logging Logger isLoggable

Introduction

On this page you can find example usages of java.util.logging Logger.isLoggable.

Prototype

public boolean isLoggable(Level level) 

Source Link

Document

Check if a message of the given level would actually be logged by this logger.

Usage

From source file:org.protempa.backend.dsb.relationaldb.AbstractSQLGenerator.java

/**
 * Logs, at FINER level, the names of all entity specs applicable to the
 * current query. Does nothing when FINER is not loggable.
 *
 * @param allEntitySpecsCopy the entity specs whose names to log
 */
private static void logApplicableEntitySpecs(List<EntitySpec> allEntitySpecsCopy) {
    Logger logger = SQLGenUtil.logger();
    if (!logger.isLoggable(Level.FINER)) {
        return;
    }
    String[] names = new String[allEntitySpecsCopy.size()];
    int index = 0;
    for (EntitySpec entitySpec : allEntitySpecsCopy) {
        names[index] = entitySpec.getName();
        index++;
    }
    logger.log(Level.FINER, "Applicable entity specs are {0}",
            StringUtils.join(names, ", "));
}

From source file:org.protempa.backend.dsb.relationaldb.AbstractSQLGenerator.java

/**
 * Builds the SQL select statement for the given entity spec and hands it to
 * the streaming executor, logging the generated query at FINE level.
 *
 * @throws DataSourceReadException if executing the select fails
 */
private <P extends Proposition> void generateAndExecuteSelectStreaming(EntitySpec entitySpec,
        ReferenceSpec referenceSpec, Set<String> propIds, Set<Filter> filtersCopy,
        List<EntitySpec> entitySpecsCopy, LinkedHashMap<String, ReferenceSpec> inboundRefSpecs,
        Set<String> keyIds, SQLOrderBy order, StreamingResultProcessor<P> resultProcessor,
        StreamingSQLExecutor executor, boolean wrapKeyId) throws DataSourceReadException {
    Logger logger = SQLGenUtil.logger();
    String backendName = backendNameForMessages();
    String specName = entitySpec.getName();

    if (logger.isLoggable(Level.FINE)) {
        logger.log(Level.FINE, "Data source backend {0} is generating query for {1}",
                new Object[] { backendName, specName });
    }

    String query = getSelectStatement(entitySpec, referenceSpec, entitySpecsCopy, inboundRefSpecs, filtersCopy,
            propIds, keyIds, order, resultProcessor, this.stagedTableSpecs, wrapKeyId).generateStatement();

    if (logger.isLoggable(Level.FINE)) {
        logger.log(Level.FINE, "Data source backend {0} generated the following query for {1}: {2}",
                new Object[] { backendName, specName, query });
    }
    executor.executeSelect(specName, query, resultProcessor);
}

From source file:org.protempa.backend.dsb.relationaldb.AbstractSQLGenerator.java

/**
 * Maps each applicable entity spec to the proposition ids it handles,
 * logging (at FINER) any proposition id this data source backend does not
 * know about.
 *
 * @param propIds the proposition ids to map
 * @return a map from entity spec to the proposition ids it handles
 */
private Map<EntitySpec, List<String>> entitySpecToPropIds(Set<String> propIds) throws AssertionError {
    Map<EntitySpec, List<String>> result = new HashMap<>();
    // Hoisted out of the loop: the logger and its level do not change
    // between iterations, so there is no need to re-fetch them per propId.
    Logger logger = SQLGenUtil.logger();
    boolean finerLoggable = logger.isLoggable(Level.FINER);
    for (String propId : propIds) {
        boolean inDataSource = populateEntitySpecToPropIdMap(new String[] { propId }, result);
        if (!inDataSource && finerLoggable) {
            logger.log(Level.FINER, "Data source backend {0} does not know about proposition {1}",
                    new Object[] { backendNameForMessages(), propId });
        }
    }
    return result;
}

From source file:org.protempa.backend.dsb.relationaldb.ConstantResultProcessor.java

/**
 * Reads each row of the result set, builds a {@link Constant} proposition
 * from it, and adds it to the result cache, flushing every FLUSH_SIZE
 * records. Rows with a null key id or null unique ids are skipped.
 *
 * @param resultSet the result set to read; not closed by this method
 * @throws SQLException if reading the result set fails
 */
@Override
public void process(ResultSet resultSet) throws SQLException {
    ResultCache<Constant> results = getResults();
    EntitySpec entitySpec = getEntitySpec();
    String entitySpecName = entitySpec.getName();
    String[] propIds = entitySpec.getPropositionIds();
    ColumnSpec codeSpec = entitySpec.getCodeSpec();
    if (codeSpec != null) {
        // Only the last column spec in the chain names the code column.
        List<ColumnSpec> codeSpecL = codeSpec.asList();
        codeSpec = codeSpecL.get(codeSpecL.size() - 1);
    }
    Logger logger = SQLGenUtil.logger();
    PropertySpec[] propertySpecs = entitySpec.getPropertySpecs();
    Value[] propertyValues = new Value[propertySpecs.length];
    int count = 0;
    String[] uniqueIds = new String[entitySpec.getUniqueIdSpecs().length];
    SourceSystem dsType = DataSourceBackendSourceSystem.getInstance(getDataSourceBackendId());
    ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
    int[] columnTypes = new int[resultSetMetaData.getColumnCount()];
    for (int i = 0; i < columnTypes.length; i++) {
        columnTypes[i] = resultSetMetaData.getColumnType(i + 1);
    }

    while (resultSet.next()) {
        int i = 1;
        String keyId = resultSet.getString(i++);
        if (keyId == null) {
            logger.warning("A keyId is null. Skipping record.");
            continue;
        }

        i = readUniqueIds(uniqueIds, resultSet, i);
        if (Arrays.contains(uniqueIds, null)) {
            if (logger.isLoggable(Level.WARNING)) {
                logger.log(Level.WARNING, "Unique ids contain null ({0}). Skipping record.",
                        StringUtils.join(uniqueIds, ", "));
            }
            // BUG FIX: this continue was previously inside the isLoggable
            // check, so records with null unique ids were NOT skipped when
            // WARNING logging was disabled.
            continue;
        }
        UniqueId uniqueId = generateUniqueId(entitySpecName, uniqueIds);

        String propId = null;
        if (!isCasePresent()) {
            if (codeSpec == null) {
                assert propIds.length == 1 : "Don't know which proposition id to assign to";
                propId = propIds[0];
            } else {
                String code = resultSet.getString(i++);
                propId = sqlCodeToPropositionId(codeSpec, code);
                if (propId == null) {
                    // Unknown code: silently skip this row.
                    continue;
                }
            }
        } else {
            // Case handling: the proposition id column comes later.
            i++;
        }

        i = extractPropertyValues(resultSet, i, propertyValues, columnTypes);

        if (isCasePresent()) {
            propId = resultSet.getString(i++);
        }

        Constant cp = new Constant(propId, uniqueId);
        for (int j = 0; j < propertySpecs.length; j++) {
            PropertySpec propertySpec = propertySpecs[j];
            cp.setProperty(propertySpec.getName(), propertyValues[j]);
        }
        cp.setSourceSystem(dsType);
        logger.log(Level.FINEST, "Created constant {0}", cp);
        results.add(keyId, cp);
        if (++count % FLUSH_SIZE == 0) {
            try {
                results.flush(this);
            } catch (IOException ex) {
                throw new QueryResultsCacheException("Flushing primitive parameters to cache failed", ex);
            }
            if (logger.isLoggable(Level.FINE)) {
                Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record", "Retrieved {0} records");
            }
        }
    }
    // Final flush for any remainder below FLUSH_SIZE.
    try {
        results.flush(this);
    } catch (IOException ex) {
        throw new QueryResultsCacheException("Flushing primitive parameters to cache failed", ex);
    }
    if (logger.isLoggable(Level.FINE)) {
        Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record total",
                "Retrieved {0} records total");
    }
}

From source file:org.protempa.backend.dsb.relationaldb.EventResultProcessor.java

/**
 * Reads each row of the result set, builds an {@link Event} proposition
 * (including its time interval) from it, and adds it to the result cache,
 * flushing every FLUSH_SIZE records. Rows with a null key id or null unique
 * ids are skipped.
 *
 * @param resultSet the result set to read; not closed by this method
 * @throws SQLException if reading the result set fails
 */
@Override
public void process(ResultSet resultSet) throws SQLException {
    ResultCache<Event> results = getResults();
    EntitySpec entitySpec = getEntitySpec();
    String entitySpecName = entitySpec.getName();
    String[] propIds = entitySpec.getPropositionIds();
    ColumnSpec codeSpec = entitySpec.getCodeSpec();
    if (codeSpec != null) {
        // Only the last column spec in the chain names the code column.
        List<ColumnSpec> codeSpecL = codeSpec.asList();
        codeSpec = codeSpecL.get(codeSpecL.size() - 1);
    }
    Logger logger = SQLGenUtil.logger();
    PropertySpec[] propertySpecs = entitySpec.getPropertySpecs();
    Value[] propertyValues = new Value[propertySpecs.length];
    int count = 0;
    ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
    int[] columnTypes = new int[resultSetMetaData.getColumnCount()];
    for (int i = 0; i < columnTypes.length; i++) {
        columnTypes[i] = resultSetMetaData.getColumnType(i + 1);
    }
    String[] uniqueIds = new String[entitySpec.getUniqueIdSpecs().length];
    SourceSystem dsType = DataSourceBackendSourceSystem.getInstance(getDataSourceBackendId());
    JDBCPositionFormat positionParser = entitySpec.getPositionParser();

    while (resultSet.next()) {
        int i = 1;
        String keyId = resultSet.getString(i++);
        if (keyId == null) {
            logger.warning("A keyId is null. Skipping record.");
            continue;
        }

        i = readUniqueIds(uniqueIds, resultSet, i);
        if (Arrays.contains(uniqueIds, null)) {
            if (logger.isLoggable(Level.WARNING)) {
                logger.log(Level.WARNING, "Unique ids contain null ({0}). Skipping record.",
                        StringUtils.join(uniqueIds, ", "));
            }
            // BUG FIX: this continue was previously inside the isLoggable
            // check, so records with null unique ids were NOT skipped when
            // WARNING logging was disabled.
            continue;
        }
        UniqueId uniqueId = generateUniqueId(entitySpecName, uniqueIds);

        String propId = null;
        if (!isCasePresent()) {
            if (codeSpec == null) {
                assert propIds.length == 1 : "Don't know which proposition id to assign to";
                propId = propIds[0];
            } else {
                String code = resultSet.getString(i++);
                propId = sqlCodeToPropositionId(codeSpec, code);
                if (propId == null) {
                    // Unknown code: silently skip this row.
                    continue;
                }
            }
        } else {
            // Case handling: the proposition id column comes later.
            i++;
        }

        ColumnSpec finishTimeSpec = entitySpec.getFinishTimeSpec();
        Granularity gran = entitySpec.getGranularity();
        Interval interval = null;
        if (finishTimeSpec == null) {
            // Timestamp-only event: a single position column.
            Long d = null;
            try {
                d = positionParser.toPosition(resultSet, i, columnTypes[i - 1]);
                i++;
            } catch (SQLException e) {
                logger.log(Level.WARNING, "Could not parse timestamp. Leaving the start time/timestamp unset.",
                        e);
            }
            interval = intervalFactory.getInstance(d, gran);
        } else {
            // Start/finish event: two position columns.
            Long start = null;
            try {
                start = positionParser.toPosition(resultSet, i, columnTypes[i - 1]);
            } catch (SQLException e) {
                logger.log(Level.WARNING, "Could not parse start time. Leaving the start time/timestamp unset.",
                        e);
            } finally {
                i++;
            }
            Long finish = null;
            try {
                finish = positionParser.toPosition(resultSet, i, columnTypes[i - 1]);
            } catch (SQLException e) {
                // BUG FIX: this message incorrectly said "start time".
                logger.log(Level.WARNING, "Could not parse finish time. Leaving the finish time unset.", e);
            } finally {
                i++;
            }
            if (finish != null && start != null && finish.compareTo(start) < 0) {
                logger.log(Level.WARNING, "Finish {0} is before start {1}: Leaving time unset",
                        new Object[] { finish, start });
                interval = intervalFactory.getInstance(null, gran, null, gran);
            } else {
                interval = intervalFactory.getInstance(start, gran, finish, gran);
            }
        }

        i = extractPropertyValues(resultSet, i, propertyValues, columnTypes);

        if (isCasePresent()) {
            propId = resultSet.getString(i++);
        }

        Event event = new Event(propId, uniqueId);
        event.setSourceSystem(dsType);
        event.setInterval(interval);
        for (int j = 0; j < propertySpecs.length; j++) {
            PropertySpec propertySpec = propertySpecs[j];
            event.setProperty(propertySpec.getName(), propertyValues[j]);
        }
        logger.log(Level.FINEST, "Created event {0}", event);
        results.add(keyId, event);
        if (++count % FLUSH_SIZE == 0) {
            try {
                results.flush(this);
            } catch (IOException ex) {
                throw new QueryResultsCacheException("Flushing primitive parameters to cache failed", ex);
            }
            if (logger.isLoggable(Level.FINE)) {
                Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record", "Retrieved {0} records");
            }
        }
    }
    // Final flush for any remainder below FLUSH_SIZE.
    try {
        results.flush(this);
    } catch (IOException ex) {
        throw new QueryResultsCacheException("Flushing primitive parameters to cache failed", ex);
    }
    if (logger.isLoggable(Level.FINE)) {
        Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record total",
                "Retrieved {0} records total");
    }
}

From source file:org.protempa.backend.dsb.relationaldb.PrimitiveParameterResultProcessor.java

/**
 * Reads each row of the result set, builds a {@link PrimitiveParameter}
 * (with timestamp, granularity, and value) from it, and adds it to the
 * result cache, flushing every FLUSH_SIZE records. Rows with a null key id
 * or null unique ids are skipped.
 *
 * @param resultSet the result set to read; not closed by this method
 * @throws SQLException if reading the result set fails
 */
@Override
public void process(ResultSet resultSet) throws SQLException {
    ResultCache<PrimitiveParameter> results = getResults();
    EntitySpec entitySpec = getEntitySpec();
    String entitySpecName = entitySpec.getName();
    String[] propIds = entitySpec.getPropositionIds();
    ColumnSpec codeSpec = entitySpec.getCodeSpec();
    if (codeSpec != null) {
        // Only the last column spec in the chain names the code column.
        List<ColumnSpec> codeSpecL = codeSpec.asList();
        codeSpec = codeSpecL.get(codeSpecL.size() - 1);
    }
    Logger logger = SQLGenUtil.logger();
    PropertySpec[] propertySpecs = entitySpec.getPropertySpecs();
    Value[] propertyValues = new Value[propertySpecs.length];
    int count = 0;
    String[] uniqueIds = new String[entitySpec.getUniqueIdSpecs().length];
    ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
    int[] columnTypes = new int[resultSetMetaData.getColumnCount()];
    for (int i = 0; i < columnTypes.length; i++) {
        columnTypes[i] = resultSetMetaData.getColumnType(i + 1);
    }
    SourceSystem dsType = DataSourceBackendSourceSystem.getInstance(getDataSourceBackendId());

    while (resultSet.next()) {
        int i = 1;

        String keyId = resultSet.getString(i++);
        if (keyId == null) {
            logger.warning("A keyId is null. Skipping record.");
            continue;
        }

        i = readUniqueIds(uniqueIds, resultSet, i);
        if (Arrays.contains(uniqueIds, null)) {
            if (logger.isLoggable(Level.WARNING)) {
                logger.log(Level.WARNING, "Unique ids contain null ({0}). Skipping record.",
                        StringUtils.join(uniqueIds, ", "));
            }
            // BUG FIX: this continue was previously inside the isLoggable
            // check, so records with null unique ids were NOT skipped when
            // WARNING logging was disabled.
            continue;
        }
        UniqueId uniqueId = generateUniqueId(entitySpecName, uniqueIds);

        String propId = null;
        if (!isCasePresent()) {
            if (codeSpec == null) {
                assert propIds.length == 1 : "Don't know which proposition id to assign to";
                propId = propIds[0];
            } else {
                String code = resultSet.getString(i++);
                propId = sqlCodeToPropositionId(codeSpec, code);
                if (propId == null) {
                    // Unknown code: silently skip this row.
                    continue;
                }
            }
        } else {
            // Case handling: the proposition id column comes later.
            i++;
        }

        Long timestamp = null;
        try {
            timestamp = entitySpec.getPositionParser().toPosition(resultSet, i, columnTypes[i - 1]);
            i++;
        } catch (SQLException e) {
            logger.log(Level.WARNING, "Could not parse timestamp. Leaving timestamp unset.", e);
        }

        ValueType valueType = entitySpec.getValueType();
        String cpValStr = resultSet.getString(i++);
        Value cpVal = valueType.parse(cpValStr);

        i = extractPropertyValues(resultSet, i, propertyValues, columnTypes);

        if (isCasePresent()) {
            propId = resultSet.getString(i++);
        }

        PrimitiveParameter p = new PrimitiveParameter(propId, uniqueId);
        p.setPosition(timestamp);
        p.setGranularity(entitySpec.getGranularity());
        p.setValue(cpVal);
        for (int j = 0; j < propertySpecs.length; j++) {
            PropertySpec propertySpec = propertySpecs[j];
            p.setProperty(propertySpec.getName(), propertyValues[j]);
        }
        p.setSourceSystem(dsType);

        logger.log(Level.FINEST, "Created primitive parameter {0}", p);
        results.add(keyId, p);
        if (++count % FLUSH_SIZE == 0) {
            try {
                results.flush(this);
            } catch (IOException ex) {
                throw new QueryResultsCacheException("Flushing primitive parameters to cache failed", ex);
            }
            if (logger.isLoggable(Level.FINE)) {
                Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record", "Retrieved {0} records");
            }
        }
    }
    // Final flush for any remainder below FLUSH_SIZE.
    try {
        results.flush(this);
    } catch (IOException ex) {
        throw new QueryResultsCacheException("Flushing primitive parameters to cache failed", ex);
    }
    if (logger.isLoggable(Level.FINE)) {
        Logging.logCount(logger, Level.FINE, count, "Retrieved {0} record total",
                "Retrieved {0} records total");
    }
}

From source file:org.protempa.dest.table.Link.java

/**
 * Returns whether the given proposition satisfies every property
 * constraint. A missing (null) property value fails immediately.
 *
 * @param proposition the proposition to check
 * @param constraints the constraints that must all hold
 * @return {@code true} if every constraint is satisfied
 */
private boolean constraintsCheckCompatible(Proposition proposition, PropertyConstraint[] constraints) {
    Logger logger = Util.logger();
    for (PropertyConstraint constraint : constraints) {
        String propertyName = constraint.getPropertyName();
        Value propertyValue = proposition.getProperty(propertyName);
        if (propertyValue == null) {
            return false;
        }
        ValueComparator comparator = constraint.getValueComparator();
        if (logger.isLoggable(Level.FINER)) {
            logger.log(Level.FINER, "Proposition is {0}; Property is {1}; Value is {2}; Comparator: {3}",
                    new Object[] { proposition.getId(), propertyName, propertyValue, comparator });
        }
        if (!comparator.compare(propertyValue, constraint.getValue())) {
            return false;
        }
    }
    return true;
}

From source file:org.protempa.dest.table.TableQueryResultsHandler.java

/**
 * Called before query results are handled. Writes the delimited header row
 * (when the headerWritten flag allows it) and builds the knowledge-source
 * cache used while handling results.
 *
 * @param cache proposition definitions used to seed the knowledge source cache
 * @throws QueryResultsHandlerProcessingException if the knowledge source
 *         cannot be read or the header cannot be written
 */
@Override
public void start(Collection<PropositionDefinition> cache) throws QueryResultsHandlerProcessingException {
    Logger logger = Util.logger();
    // NOTE(review): writing the header only when headerWritten is TRUE looks
    // inverted (one would expect !headerWritten); the field's initialization
    // is outside this view -- confirm its intended meaning before changing.
    if (this.headerWritten) {
        try {
            List<String> columnNames = new ArrayList<>();
            columnNames.add("KeyId");
            for (TableColumnSpec columnSpec : this.columnSpecs) {
                logger.log(Level.FINE, "Processing columnSpec type {0}", columnSpec.getClass().getName());
                String[] colNames = columnSpec.columnNames(this.knowledgeSource);
                assert colNames.length > 0 : "colNames must have length > 0";
                // Apply user-configured column-name replacements in place.
                for (int index = 0; index < colNames.length; index++) {
                    String colName = colNames[index];
                    if (this.replace.containsKey(colName)) {
                        colNames[index] = this.replace.get(colName);
                    }
                }
                if (logger.isLoggable(Level.FINE)) {
                    logger.log(Level.FINE, "Got the following columns for proposition {0}: {1}", new Object[] {
                            StringUtils.join(this.rowPropositionIds, ", "), StringUtils.join(colNames, ", ") });
                }
                for (String colName : colNames) {
                    columnNames.add(colName);
                }
            }
            StringUtil.escapeAndWriteDelimitedColumns(columnNames, this.columnDelimiter, this.out);
            this.out.newLine();
        } catch (KnowledgeSourceReadException ex1) {
            throw new QueryResultsHandlerProcessingException("Error reading knowledge source", ex1);
        } catch (IOException ex) {
            throw new QueryResultsHandlerProcessingException("Could not write header", ex);
        }
    }

    try {
        this.ksCache = new KnowledgeSourceCacheFactory().getInstance(this.knowledgeSource, cache, true);
    } catch (KnowledgeSourceReadException ex) {
        throw new QueryResultsHandlerProcessingException(ex);
    }
}

From source file:org.protempa.dest.xml.XmlQueryResultsHandler.java

/**
 * Builds a {@code <references>} XML element for the given proposition,
 * recursing into each referred proposition that has not already been
 * handled.
 *
 * @return the populated {@code <references>} element (possibly empty)
 * @throws ProtempaException if handling a referred proposition fails
 */
private Element handleReferences(Set<UniqueId> handled, Map<Proposition, List<Proposition>> forwardDerivations,
        Map<Proposition, List<Proposition>> backwardDerivations, Map<UniqueId, Proposition> references,
        Proposition proposition, XmlPropositionVisitor visitor, Document document) throws ProtempaException {
    Element referencesElem = document.createElement("references");
    Collection<String> orderedReferences = orderReferences(proposition);
    Logger logger = Util.logger();
    if (logger.isLoggable(Level.FINEST)) {
        logger.log(Level.FINEST, "Ordered References for proposition {0}: {1}",
                new Object[] { proposition.getId(), orderedReferences });
    }
    if (orderedReferences != null) {
        for (String refName : orderedReferences) {
            logger.log(Level.FINEST, "Processing reference {0}", refName);
            List<UniqueId> uids = proposition.getReferences(refName);
            logger.log(Level.FINEST, "Total unique identifiers: {0}", uids.size());
            logger.log(Level.FINEST, "UniqueIdentifiers: {0}", uids);
            List<Proposition> referredProps = createReferenceList(uids, references);
            logger.log(Level.FINEST, "Total referred propositions:  {0}", referredProps.size());
            if (referredProps.isEmpty()) {
                continue;
            }
            List<Proposition> unhandledProps = filterHandled(referredProps, handled);
            logger.log(Level.FINEST, "Total filtered referred propositions: {0}",
                    unhandledProps.size());
            if (unhandledProps.isEmpty()) {
                logger.log(Level.FINEST, "Skipping reference {0} because all propositions were handled",
                        refName);
            } else {
                Element refElem = document.createElement("reference");
                refElem.setAttribute("name", refName);
                for (Proposition refProp : unhandledProps) {
                    Element child = handleProposition(handled, forwardDerivations, backwardDerivations,
                            references, refProp, visitor, document);
                    if (child != null) {
                        refElem.appendChild(child);
                    }
                }
                referencesElem.appendChild(refElem);
            }
        }
    }
    return referencesElem;
}

From source file:org.usrz.libs.logging.LevelDebugTest.java

/**
 * Verifies that java.util.logging levels map onto the configured backend:
 * FINEST and FINER are disabled, while FINE and above are enabled and
 * produce an event in the test appender.
 */
@Test
public void testJavaLogging() {
    final java.util.logging.Logger log = java.util.logging.Logger.getLogger(this.getClass().getName());

    // Below the effective threshold: nothing must reach the appender.
    log.finest("Foobar FINEST");
    AppenderForTests.hasNoLastEvent("at Finest level");
    assertFalse(log.isLoggable(java.util.logging.Level.FINEST));

    log.finer("Foobar FINER");
    AppenderForTests.hasNoLastEvent("at Finer level");
    assertFalse(log.isLoggable(java.util.logging.Level.FINER));

    // At or above the threshold: every message must be captured.
    log.fine("Foobar FINE");
    AppenderForTests.hasLastEvent("at Fine level");
    assertTrue(log.isLoggable(java.util.logging.Level.FINE));

    log.config("Foobar CONFIG");
    AppenderForTests.hasLastEvent("at Config level");
    assertTrue(log.isLoggable(java.util.logging.Level.CONFIG));

    log.info("Foobar INFO");
    AppenderForTests.hasLastEvent("at Info level");
    assertTrue(log.isLoggable(java.util.logging.Level.INFO));

    log.warning("Foobar WARNING");
    AppenderForTests.hasLastEvent("at Warning level");
    assertTrue(log.isLoggable(java.util.logging.Level.WARNING));

    log.severe("Foobar SEVERE");
    AppenderForTests.hasLastEvent("at Severe level");
    assertTrue(log.isLoggable(java.util.logging.Level.SEVERE));
}