Example usage for org.apache.commons.logging Log isInfoEnabled

Introduction

This page collects example usages of org.apache.commons.logging Log.isInfoEnabled.

Prototype

boolean isInfoEnabled();

Document

Is info logging currently enabled?
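
A typical reason to call isInfoEnabled() is to skip the cost of building a log message when INFO is disabled. A minimal sketch of that guard pattern (the class name and message are illustrative):

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class InfoGuardExample {
    private static final Log log = LogFactory.getLog(InfoGuardExample.class);

    public void process(String item, long elapsedMillis) {
        // Build the message only when INFO is enabled, avoiding needless concatenation.
        if (log.isInfoEnabled()) {
            log.info("Processed " + item + " in " + elapsedMillis + " msec");
        }
    }
}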

Usage

From source file:de.ingrid.iplug.csw.dsc.cache.impl.AbstractUpdateStrategy.java

/**
 * Fetch all records that satisfy the given filter using the GetRecords and
 * return the ids and put them into the cache
 * @note This method guarantees that the server is queried without a constraint
 * if the provided filter set is empty
 * 
 * @param client The CSWClient to use
 * @param elementSetName The ElementSetName of the records to fetch
 * @param filterSet The filter set used to select the records
 * @param doCache Determines whether to cache the records or not
 * @return A list of ids of the fetched records
 * @throws Exception
 */
protected List<String> fetchRecords(CSWClient client, ElementSetName elementSetName, Set<Document> filterSet,
        boolean doCache) throws Exception {

    CSWFactory factory = client.getFactory();
    Log log = this.getLog();

    // if the filter set is empty, we add a single null entry;
    // this causes execution of the iteration below, but
    // will not add a constraint definition to the request
    if (filterSet == null)
        filterSet = new HashSet<Document>();
    if (filterSet.size() == 0)
        filterSet.add(null);

    // variables for complete fetch process
    // int numTotal = 0;
    List<String> fetchedRecordIds = new CopyOnWriteArrayList<String>();

    // iterate over all filters
    int filterIndex = 1;
    for (Document filter : filterSet) {
        if (log.isDebugEnabled())
            log.debug("Processing filter " + filterIndex + ": "
                    + StringUtils.nodeToString(filter).replace("\n", "") + ".");

        // variables for current fetch process (current filter)
        int numRecordsTotal = 0;
        int numRecordsFetched = 0;
        List<String> currentFetchedRecordIds = new ArrayList<String>();

        // create the query
        CSWQuery query = factory.createQuery();
        query.setConstraint(filter);
        query.setResultType(ResultType.RESULTS);
        query.setElementSetName(elementSetName);
        query.setMaxRecords(this.recordsPerCall);
        query.setStartPosition(1);

        // do requests

        // do first request

        CSWSearchResult result = client.getRecords(query);
        numRecordsFetched += result.getNumberOfRecords();
        numRecordsTotal = result.getNumberOfRecordsTotal();
        if (log.isInfoEnabled())
            log.info(numRecordsTotal + " record(s) from filter " + filterIndex + ":");

        if (numRecordsTotal > 0) {

            if (log.isInfoEnabled()) {
                log.info("\nPARAMETERS OF FETCHING PROCESS:" + "\nrecords per chunk (request): "
                        + recordsPerCall + "\ngeneral pause between requesting next chunk (msec): "
                        + requestPause + "\nnum retries per chunk: " + cswConfig.numRetriesPerRequest
                        + "\npause between retries (msec): " + cswConfig.timeBetweenRetries
                        + "\nmax number of lost chunks: " + cswConfig.maxNumSkippedRequests);
            }

            // process
            currentFetchedRecordIds.addAll(processResult(result, doCache));

            int numSkippedRequests = 0;
            String logLostRecordChunks = "";
            int numLostRecords = 0;
            while (numRecordsFetched < numRecordsTotal) {
                if (cswConfig.maxNumSkippedRequests > -1) {
                    // fetching should end when a maximum number of failures (in a row) is reached.
                    if (numSkippedRequests > cswConfig.maxNumSkippedRequests) {
                        log.error("Problems fetching records. Total number of skipped requests reached ("
                                + cswConfig.maxNumSkippedRequests
                                + " requests without results). We end fetching process for this filter.");
                        statusProvider.addState(
                                "ERROR_FETCH", "Error during fetch, since more than "
                                        + cswConfig.maxNumSkippedRequests + " records have been skipped.",
                                Classification.ERROR);
                        break;
                    }
                }

                // generic pause between requests, set via spring
                Thread.sleep(this.requestPause);

                String logCurrRecordChunk = "";
                try {
                    // prepare next request
                    // Just for safety: get number of last fetched records from last result, if we have a result and records.
                    int numLastFetch = query.getMaxRecords();
                    if (result != null && (result.getNumberOfRecords() > 0)) {
                        numLastFetch = result.getNumberOfRecords();
                    }
                    numRecordsFetched += numLastFetch;
                    statusProvider.addState("FETCH",
                            "Fetching record " + (numRecordsFetched - numLastFetch + 1) + "-"
                                    + numRecordsFetched + " / " + numRecordsTotal + " from "
                                    + client.getFactory().getServiceUrl());

                    query.setStartPosition(query.getStartPosition() + numLastFetch);

                    // for logging below
                    logCurrRecordChunk = "" + query.getStartPosition() + " - "
                            + (query.getStartPosition() + query.getMaxRecords());

                    // do next request, if problems retry with increasing pause in between 
                    int numRetries = 0;
                    while (true) {
                        try {
                            result = null;
                            result = client.getRecords(query);
                            break;

                        } catch (Exception e) {
                            if (numRetries == cswConfig.numRetriesPerRequest) {
                                log.error("Retried " + numRetries + " times ! We skip records "
                                        + logCurrRecordChunk, e);
                                break;
                            }

                            numRetries++;
                            int timeBetweenRetry = numRetries * cswConfig.timeBetweenRetries;
                            log.error("Error fetching records " + logCurrRecordChunk + ". We retry "
                                    + numRetries + ". time after " + timeBetweenRetry + " msec !", e);
                            Thread.sleep(timeBetweenRetry);
                        }
                    }

                    // process
                    if (result == null || result.getNumberOfRecords() == 0) {
                        // no result from this query, we count the failures to check whether fetching process should be ended !
                        numSkippedRequests++;
                        numLostRecords += query.getMaxRecords();
                        logLostRecordChunks += logCurrRecordChunk + "\n";

                    } else {
                        currentFetchedRecordIds.addAll(processResult(result, doCache));
                    }
                } catch (Exception e) {
                    statusProvider.addState("ERROR_FETCH_PROCESS",
                            "Error during processing record: " + logCurrRecordChunk, Classification.ERROR);
                    log.error("Error processing records " + logCurrRecordChunk);
                    log.error(ExceptionUtils.getStackTrace(e));
                }
            }

            if (numLostRecords > 0) {
                statusProvider.addState("ERROR_FETCH_PROCESS",
                        "Error during fetching of record: " + logLostRecordChunks, Classification.ERROR);
                log.error("\nWe had failed GetRecords requests !!!" + "\nThe following " + numLostRecords
                        + " records were NOT fetched and are \"lost\":" + "\n" + logLostRecordChunks);
            }
        }

        // collect record ids
        fetchedRecordIds.addAll(currentFetchedRecordIds);
        // numTotal += currentFetchedRecordIds.size();
        filterIndex++;
    }
    return fetchedRecordIds;
}
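
The inner request loop above retries failed GetRecords calls with a linearly growing pause. The same pattern distilled into a standalone helper (maxRetries and baseDelayMillis are illustrative stand-ins for cswConfig.numRetriesPerRequest and cswConfig.timeBetweenRetries):

import java.util.concurrent.Callable;

public final class RetrySupport {
    // Retry a request up to maxRetries times, sleeping attempt * baseDelayMillis
    // between attempts, then rethrow the last failure.
    public static <T> T fetchWithRetry(Callable<T> request, int maxRetries, long baseDelayMillis)
            throws Exception {
        int attempt = 0;
        while (true) {
            try {
                return request.call();
            } catch (Exception e) {
                if (attempt == maxRetries) {
                    throw e; // give up after the configured number of retries
                }
                attempt++;
                Thread.sleep(attempt * baseDelayMillis); // linear backoff
            }
        }
    }
}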

From source file:com.cisco.dvbu.ps.common.util.CommonUtils.java

public static void writeOutput(String message, String prefix, String options, Log logger, boolean debug1,
        boolean debug2, boolean debug3) {

    // Determine if there is a prefix to prepend
    if (prefix == null) {
        prefix = "";
    } else {
        prefix = prefix + "::";
    }
    //Write out the log if not suppressed
    if (!options.contains("-suppress")) {

        //Write to log when -error
        if (options.contains("-error")) {
            if (logger.isErrorEnabled()) {
                logger.error(prefix + message);
            }
        }

        //Write to log when -info
        if (options.contains("-info")) {
            if (logger.isInfoEnabled()) {
                logger.info(prefix + message);
            }
        }

        //Write to log when -debug1
        if (options.contains("-debug1") && debug1) {
            // logger.isInfoEnabled() is checked on purpose.  Don't change it.
            if (logger.isInfoEnabled()) {
                logger.info("DEBUG1::" + prefix + message);
            }
        }

        //Write to log when -debug2
        if (options.contains("-debug2") && debug2) {
            // logger.isInfoEnabled() is checked on purpose.  Don't change it.
            if (logger.isInfoEnabled()) {
                logger.info("DEBUG2::" + prefix + message);
            }
        }

        //Write to log when -debug3
        if (options.contains("-debug3") && debug3) {
            // logger.isInfoEnabled() is checked on purpose.  Don't change it.
            if (logger.isInfoEnabled()) {
                logger.info("DEBUG3::" + prefix + message);
            }
        }
    }
}
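
A hypothetical invocation of the method above (the logger, prefix, and option string are illustrative, not taken from the original project):

Log logger = LogFactory.getLog(CommonUtils.class);
// With "-info" in the options and INFO enabled, this logs "MyModule::starting import".
// Passing "-suppress" in the options would silence the call entirely.
CommonUtils.writeOutput("starting import", "MyModule", "-info", logger, false, false, false);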

From source file:com.amazon.carbonado.repo.indexed.ManagedIndex.java

/**
 * Build the entire index, repairing as it goes.
 *
 * @param repo used to enter transactions
 */
void buildIndex(double desiredSpeed) throws RepositoryException {
    final MergeSortBuffer buffer;
    final Comparator c;

    final Log log = LogFactory.getLog(IndexedStorage.class);

    final Query<S> masterQuery;
    {
        // Need to explicitly order master query by primary key in order
        // for fetchAfter to work correctly in case corrupt records are
        // encountered.
        masterQuery = mMasterStorage.query().orderBy(naturalOrdering(mMasterStorage.getStorableType()));
    }

    // Quick check to see if any records exist in master.
    {
        Transaction txn = mRepository.enterTopTransaction(IsolationLevel.READ_COMMITTED);
        try {
            if (!masterQuery.exists()) {
                if (mIndexEntryStorage.query().exists()) {
                    txn.exit();
                    mIndexEntryStorage.truncate();
                }
                return;
            }
        } finally {
            txn.exit();
        }
    }

    // Enter top transaction with isolation level of none to make sure
    // preload operation does not run in a long nested transaction.
    Transaction txn = mRepository.enterTopTransaction(IsolationLevel.NONE);
    try {
        Cursor<S> cursor = masterQuery.fetch();
        try {
            if (log.isInfoEnabled()) {
                StringBuilder b = new StringBuilder();
                b.append("Preparing index on ");
                b.append(mMasterStorage.getStorableType().getName());
                b.append(": ");
                try {
                    mIndex.appendTo(b);
                } catch (java.io.IOException e) {
                    // Not gonna happen.
                }
                log.info(b.toString());
            }

            // Preload and sort all index entries for improved performance.

            buffer = new MergeSortBuffer(mIndexEntryStorage, null, BUILD_SORT_BUFFER_SIZE);
            c = getComparator();
            buffer.prepare(c);

            long nextReportTime = System.currentTimeMillis() + BUILD_INFO_DELAY_MILLIS;

            // These variables are used when corrupt records are encountered.
            S lastUserStorable = null;
            int skippedCount = 0;

            while (cursor.hasNext()) {
                S userStorable;
                try {
                    userStorable = cursor.next();
                    skippedCount = 0;
                } catch (CorruptEncodingException e) {
                    log.warn("Omitting corrupt record from index: " + e.toString());

                    // Exception forces cursor to close. Close again to be sure.
                    cursor.close();

                    if (lastUserStorable == null) {
                        cursor = masterQuery.fetch();
                    } else {
                        cursor = masterQuery.fetchAfter(lastUserStorable);
                    }

                    cursor.skipNext(++skippedCount);
                    continue;
                }

                buffer.add(makeIndexEntry(userStorable));

                if (log.isInfoEnabled()) {
                    long now = System.currentTimeMillis();
                    if (now >= nextReportTime) {
                        log.info("Prepared " + buffer.size() + " index entries");
                        nextReportTime = now + BUILD_INFO_DELAY_MILLIS;
                    }
                }

                lastUserStorable = userStorable;
            }

            // No need to commit transaction because no changes should have been made.
        } finally {
            cursor.close();
        }
    } finally {
        txn.exit();
    }

    // This is not expected to take long, since MergeSortBuffer sorts as
    // needed. This just finishes off what was not written to a file.
    buffer.sort();

    if (isUnique()) {
        // If index is unique, scan buffer and check for duplicates
        // _before_ inserting index entries. If there are duplicates,
        // fail, since unique index cannot be built.

        if (log.isInfoEnabled()) {
            log.info("Verifying index");
        }

        Object last = null;
        for (Object obj : buffer) {
            if (last != null) {
                if (c.compare(last, obj) == 0) {
                    buffer.close();
                    throw new UniqueConstraintException("Cannot build unique index because duplicates exist: "
                            + this + ", " + last + " == " + obj);
                }
            }
            last = obj;
        }
    }

    final int bufferSize = buffer.size();

    if (log.isInfoEnabled()) {
        log.info("Begin build of " + bufferSize + " index entries");
    }

    // Need this index entry query for deleting bogus entries.
    final Query indexEntryQuery = mIndexEntryStorage.query()
            .orderBy(naturalOrdering(mIndexEntryStorage.getStorableType()));

    Throttle throttle = desiredSpeed < 1.0 ? new Throttle(BUILD_THROTTLE_WINDOW) : null;

    long totalInserted = 0;
    long totalUpdated = 0;
    long totalDeleted = 0;
    long totalProgress = 0;

    txn = enterBuildTxn();
    try {
        Cursor<? extends Storable> indexEntryCursor = indexEntryQuery.fetch();
        Storable existingIndexEntry = null;

        if (!indexEntryCursor.hasNext()) {
            indexEntryCursor.close();
            // Don't try opening again.
            indexEntryCursor = null;
        }

        boolean retry = false;
        Storable indexEntry = null;
        Storable lastIndexEntry = null;

        long nextReportTime = System.currentTimeMillis() + BUILD_INFO_DELAY_MILLIS;

        Iterator it = buffer.iterator();
        bufferIterate: while (true) {
            if (!retry) {
                Object obj;
                if (it.hasNext()) {
                    obj = it.next();
                } else if (indexEntryCursor != null && indexEntryCursor.hasNext()) {
                    obj = null;
                } else {
                    break;
                }

                indexEntry = (Storable) obj;
            }

            try {
                if (indexEntry != null) {
                    if (indexEntry.tryInsert()) {
                        totalInserted++;
                    } else {
                        // Couldn't insert because an index entry already exists.
                        Storable existing = indexEntry.copy();
                        boolean doUpdate = false;
                        if (!existing.tryLoad()) {
                            doUpdate = true;
                        } else if (!existing.equalProperties(indexEntry)) {
                            // If only the version differs, leave existing entry alone.
                            indexEntry.copyVersionProperty(existing);
                            doUpdate = !existing.equalProperties(indexEntry);
                        }
                        if (doUpdate) {
                            indexEntry.tryDelete();
                            indexEntry.tryInsert();
                            totalUpdated++;
                        }
                    }
                }

                if (indexEntryCursor != null)
                    while (true) {
                        if (existingIndexEntry == null) {
                            if (indexEntryCursor.hasNext()) {
                                existingIndexEntry = indexEntryCursor.next();
                            } else {
                                indexEntryCursor.close();
                                // Don't try opening again.
                                indexEntryCursor = null;
                                break;
                            }
                        }

                        int compare = c.compare(existingIndexEntry, indexEntry);

                        if (compare == 0) {
                            // Existing entry cursor matches so allow cursor to advance.
                            existingIndexEntry = null;
                            break;
                        } else if (compare > 0) {
                            // Existing index entry is ahead so check later.
                            break;
                        } else {
                            // Existing index entry might be bogus. Check again
                            // in case master record changed.
                            doDelete: {
                                S master = mMasterStorage.prepare();
                                copyToMasterPrimaryKey(existingIndexEntry, master);
                                if (master.tryLoad()) {
                                    Storable temp = makeIndexEntry(master);
                                    existingIndexEntry.copyVersionProperty(temp);
                                    if (existingIndexEntry.equalProperties(temp)) {
                                        break doDelete;
                                    }
                                }

                                existingIndexEntry.tryDelete();
                                totalDeleted++;

                                if (totalDeleted % BUILD_BATCH_SIZE == 0) {
                                    txn.commit();
                                    txn.exit();

                                    nextReportTime = logProgress(nextReportTime, log, totalProgress, bufferSize,
                                            totalInserted, totalUpdated, totalDeleted);

                                    txn = enterBuildTxn();

                                    indexEntryCursor.close();
                                    indexEntryCursor = indexEntryQuery.fetchAfter(existingIndexEntry);

                                    if (!indexEntryCursor.hasNext()) {
                                        indexEntryCursor.close();
                                        // Don't try opening again.
                                        indexEntryCursor = null;
                                        break;
                                    }
                                }
                            }

                            existingIndexEntry = null;

                            throttle(throttle, desiredSpeed);
                        }
                    }

                if (indexEntry != null) {
                    totalProgress++;
                }

                lastIndexEntry = indexEntry;
                retry = false;
            } catch (RepositoryException e) {
                if (e instanceof FetchTimeoutException || e instanceof PersistTimeoutException) {
                    log.warn("Lock conflict during index repair; will retry: " + indexEntry + ", " + e);
                    // This re-uses the last index entry to repair and forces
                    // the current transaction to commit.
                    retry = true;
                } else {
                    throw e;
                }
            }

            if (retry || (totalProgress % BUILD_BATCH_SIZE == 0)) {
                txn.commit();
                txn.exit();

                nextReportTime = logProgress(nextReportTime, log, totalProgress, bufferSize, totalInserted,
                        totalUpdated, totalDeleted);

                txn = enterBuildTxn();

                if (indexEntryCursor != null) {
                    indexEntryCursor.close();
                    existingIndexEntry = null;

                    if (indexEntry == null || lastIndexEntry == null) {
                        indexEntryCursor = indexEntryQuery.fetch();
                    } else if (!retry) {
                        indexEntryCursor = indexEntryQuery.fetchAfter(indexEntry);
                    } else {
                        // Re-fetch starting at the same spot.
                        indexEntryCursor = indexEntryQuery.fetchAfter(lastIndexEntry);
                    }
                }
            }

            throttle(throttle, desiredSpeed);
        }

        txn.commit();
    } finally {
        txn.exit();
        buffer.close();
    }

    if (log.isInfoEnabled()) {
        log.info("Finished building " + totalProgress + " index entries "
                + progressSubMessgage(totalInserted, totalUpdated, totalDeleted));
    }
}
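
The nextReportTime bookkeeping above implements time-gated progress logging: at most one INFO line per reporting interval, and the timestamp check itself only runs when INFO is enabled. A minimal sketch of the same idea (the interval and per-item work are illustrative):

import java.util.List;
import org.apache.commons.logging.Log;

final class ProgressLogging {
    static void processAll(List<String> items, Log log) {
        final long intervalMillis = 5000L; // report at most every 5 seconds (illustrative)
        long nextReportTime = System.currentTimeMillis() + intervalMillis;
        int done = 0;
        for (String item : items) {
            // doWork(item); // hypothetical per-item work
            done++;
            if (log.isInfoEnabled()) {
                long now = System.currentTimeMillis();
                if (now >= nextReportTime) {
                    log.info("Processed " + done + " of " + items.size() + " items");
                    nextReportTime = now + intervalMillis;
                }
            }
        }
    }
}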

From source file:com.espertech.esper.epl.fafquery.FireAndForgetQueryExec.java

public static Collection<EventBean> snapshot(FilterSpecCompiled optionalFilter, Annotation[] annotations,
        VirtualDWView virtualDataWindow, EventTableIndexRepository indexRepository, boolean queryPlanLogging,
        Log queryPlanLogDestination, String objectName, AgentInstanceContext agentInstanceContext) {

    if (optionalFilter == null || optionalFilter.getParameters().length == 0) {
        if (virtualDataWindow != null) {
            Pair<IndexMultiKey, EventTable> pair = virtualDataWindow
                    .getFireAndForgetDesc(Collections.<String>emptySet(), Collections.<String>emptySet());
            return virtualDataWindow.getFireAndForgetData(pair.getSecond(), new Object[0],
                    new RangeIndexLookupValue[0], annotations);
        }
        return null;
    }

    // Determine what straight-equals keys and which ranges are available.
    // Widening/Coercion is part of filter spec compile.
    Set<String> keysAvailable = new HashSet<String>();
    Set<String> rangesAvailable = new HashSet<String>();
    if (optionalFilter.getParameters().length == 1) {
        for (FilterSpecParam param : optionalFilter.getParameters()[0]) {
            if (!(param instanceof FilterSpecParamConstant || param instanceof FilterSpecParamRange
                    || param instanceof FilterSpecParamIn)) {
                continue;
            }
            if (param.getFilterOperator() == FilterOperator.EQUAL
                    || param.getFilterOperator() == FilterOperator.IS
                    || param.getFilterOperator() == FilterOperator.IN_LIST_OF_VALUES) {
                keysAvailable.add(param.getLookupable().getExpression());
            } else if (param.getFilterOperator().isRangeOperator()
                    || param.getFilterOperator().isInvertedRangeOperator()
                    || param.getFilterOperator().isComparisonOperator()) {
                rangesAvailable.add(param.getLookupable().getExpression());
            }
        }
    }

    // Find an index that matches the needs
    Pair<IndexMultiKey, EventTableAndNamePair> tablePair;
    if (virtualDataWindow != null) {
        Pair<IndexMultiKey, EventTable> tablePairNoName = virtualDataWindow.getFireAndForgetDesc(keysAvailable,
                rangesAvailable);
        tablePair = new Pair<IndexMultiKey, EventTableAndNamePair>(tablePairNoName.getFirst(),
                new EventTableAndNamePair(tablePairNoName.getSecond(), null));
    } else {
        IndexHint indexHint = IndexHint.getIndexHint(annotations);
        List<IndexHintInstruction> optionalIndexHintInstructions = null;
        if (indexHint != null) {
            optionalIndexHintInstructions = indexHint.getInstructionsFireAndForget();
        }
        tablePair = indexRepository.findTable(keysAvailable, rangesAvailable, optionalIndexHintInstructions);
    }

    QueryPlanIndexHook hook = QueryPlanIndexHookUtil.getHook(annotations);
    if (queryPlanLogging && (queryPlanLogDestination.isInfoEnabled() || hook != null)) {
        String prefix = "Fire-and-forget from " + objectName + " ";
        String indexName = tablePair != null && tablePair.getSecond() != null
                ? tablePair.getSecond().getIndexName()
                : null;
        String indexText = indexName != null ? "index " + indexName + " " : "full table scan ";
        indexText += "(snapshot only, for join see separate query plan)";
        if (tablePair == null) {
            queryPlanLogDestination.info(prefix + indexText);
        } else {
            queryPlanLogDestination
                    .info(prefix + indexText + tablePair.getSecond().getEventTable().toQueryPlan());
        }

        if (hook != null) {
            hook.fireAndForget(
                    new QueryPlanIndexDescFAF(new IndexNameAndDescPair[] { new IndexNameAndDescPair(indexName,
                            tablePair != null ? tablePair.getSecond().getEventTable().getClass().getSimpleName()
                                    : null) }));
        }
    }

    if (tablePair == null) {
        return null; // indicates table scan
    }

    // Compile key sets which contain key index lookup values
    String[] keyIndexProps = IndexedPropDesc.getIndexProperties(tablePair.getFirst().getHashIndexedProps());
    boolean hasKeyWithInClause = false;
    Object[] keyValues = new Object[keyIndexProps.length];
    for (int keyIndex = 0; keyIndex < keyIndexProps.length; keyIndex++) {
        for (FilterSpecParam param : optionalFilter.getParameters()[0]) {
            if (param.getLookupable().getExpression().equals(keyIndexProps[keyIndex])) {
                if (param.getFilterOperator() == FilterOperator.IN_LIST_OF_VALUES) {
                    Object[] keyValuesList = ((MultiKeyUntyped) param.getFilterValue(null,
                            agentInstanceContext)).getKeys();
                    if (keyValuesList.length == 0) {
                        continue;
                    } else if (keyValuesList.length == 1) {
                        keyValues[keyIndex] = keyValuesList[0];
                    } else {
                        keyValues[keyIndex] = keyValuesList;
                        hasKeyWithInClause = true;
                    }
                } else {
                    keyValues[keyIndex] = param.getFilterValue(null, agentInstanceContext);
                }
                break;
            }
        }
    }

    // Analyze ranges - these may include key lookup value (EQUALS semantics)
    String[] rangeIndexProps = IndexedPropDesc.getIndexProperties(tablePair.getFirst().getRangeIndexedProps());
    RangeIndexLookupValue[] rangeValues;
    if (rangeIndexProps.length > 0) {
        rangeValues = compileRangeLookupValues(rangeIndexProps, optionalFilter.getParameters()[0],
                agentInstanceContext);
    } else {
        rangeValues = new RangeIndexLookupValue[0];
    }

    EventTable eventTable = tablePair.getSecond().getEventTable();
    IndexMultiKey indexMultiKey = tablePair.getFirst();

    // table lookup without in-clause
    if (!hasKeyWithInClause) {
        return fafTableLookup(virtualDataWindow, indexMultiKey, eventTable, keyValues, rangeValues,
                annotations);
    }

    // table lookup with in-clause: determine combinations
    Object[][] combinations = new Object[keyIndexProps.length][];
    for (int i = 0; i < keyValues.length; i++) {
        if (keyValues[i] instanceof Object[]) {
            combinations[i] = (Object[]) keyValues[i];
        } else {
            combinations[i] = new Object[] { keyValues[i] };
        }
    }

    // enumerate combinations
    CombinationEnumeration enumeration = new CombinationEnumeration(combinations);
    HashSet<EventBean> events = new HashSet<EventBean>();
    while (enumeration.hasMoreElements()) {
        Object[] keys = enumeration.nextElement();
        Collection<EventBean> result = fafTableLookup(virtualDataWindow, indexMultiKey, eventTable, keys,
                rangeValues, annotations);
        events.addAll(result);
    }
    return events;
}

From source file:hotbeans.support.FileSystemHotBeanModuleRepository.java

/**
 * Internal method to update a module.
 */
protected HotBeanModuleInfo updateModuleInternal(String moduleName, final InputStream moduleFileStream,
        final boolean add) {
    long revisionNumber = -1;
    HotBeanModuleInfo hotBeanModuleInfo = null;
    Log logger = this.getLog();

    synchronized (super.getLock()) {
        // If update - module name must be specified
        if (!add && ((moduleName == null) || (moduleName.trim().length() == 0)))
            throw new HotBeansException("Module name not specified!");

        RepositoryFileLock fileLock = null;
        File moduleTempFile = null;
        InputStream moduleTempFileStream = null;
        try {
            // Save module file to temp file
            moduleTempFile = File.createTempFile("hotBeanModule", ".jar");
            FileCopyUtils.copy(moduleFileStream, new FileOutputStream(moduleTempFile));

            // Get the module name from the manifest
            Manifest manifest = ModuleManifestUtils.readManifest(moduleTempFile);
            String jarFileModuleName = ModuleManifestUtils.getName(manifest);

            if (logger.isDebugEnabled())
                logger.debug("Module name in module manifest: '" + jarFileModuleName + "'.");

            // Validate name
            if (add) {
                if ((jarFileModuleName == null) || (jarFileModuleName.trim().length() == 0))
                    throw new InvalidModuleNameException("Module name not specified!");
                else if (super.getHotBeanModule(jarFileModuleName) != null)
                    throw new ModuleAlreadyExistsException("Module name already exists!");
            } else if (!moduleName.equals(jarFileModuleName))
                throw new InvalidModuleNameException(
                        "Module name in jar file doesn't match specified module name!");

            moduleName = jarFileModuleName;
            moduleTempFileStream = new FileInputStream(moduleTempFile);

            if (add && logger.isInfoEnabled())
                logger.info("Adding module '" + moduleName + "'.");

            fileLock = this.obtainRepositoryFileLock(false); // Obtain lock

            File moduleDirectory = new File(this.moduleRepositoryDirectory, moduleName);
            if (!moduleDirectory.exists())
                moduleDirectory.mkdirs();

            // Get next revision number
            revisionNumber = this.getLastRevisionOnFileSystem(moduleName);
            if (logger.isDebugEnabled()) {
                if (add)
                    logger.debug("Adding module - last revision on file system: " + revisionNumber + ".");
                else
                    logger.debug("Updating module - last revision on file system: " + revisionNumber + ".");
            }
            if (revisionNumber < 0)
                revisionNumber = 0;
            File moduleFile = new File(moduleDirectory, revisionNumber + MODULE_FILE_SUFFIX);

            while (moduleFile.exists()) // This shouldn't really be necessary, but still...
            {
                revisionNumber++;
                moduleFile = new File(moduleDirectory, revisionNumber + MODULE_FILE_SUFFIX);
            }

            if (logger.isDebugEnabled()) {
                if (add)
                    logger.debug("Adding module - revision of new module: " + revisionNumber + ".");
                else
                    logger.debug("Updating module - revision of new module: " + revisionNumber + ".");
            }

            // Save module file
            FileCopyUtils.copy(moduleTempFileStream, new FileOutputStream(moduleFile));

            // Deploy at once
            hotBeanModuleInfo = this.loadModule(moduleName, revisionNumber);
        } catch (Exception e) {
            String moduleNameString = "";
            if (moduleName != null)
                moduleNameString = "'" + moduleName + "' ";

            if (add) {
                logger.error("Error adding module " + moduleNameString + "- " + e, e);
                if (e instanceof HotBeansException)
                    throw (HotBeansException) e;
                else
                    throw new HotBeansException("Error adding module " + moduleNameString + "- " + e, e);
            } else {
                logger.error("Error updating module " + moduleNameString + "- " + e, e);
                if (e instanceof HotBeansException)
                    throw (HotBeansException) e;
                else
                    throw new HotBeansException("Error updating module " + moduleNameString + "- " + e, e);
            }
        } finally {
            this.releaseRepositoryFileLock(fileLock);
            fileLock = null;

            if (moduleTempFileStream != null) {
                // Delete temp file
                try {
                    moduleTempFileStream.close();
                } catch (Exception e) {
                }
            }
            if (moduleTempFile != null)
                FileDeletor.delete(moduleTempFile);
        }
    }

    return hotBeanModuleInfo;
}

From source file:org.acmsl.queryj.customsql.handlers.customsqlvalidation.RetrieveQueryHandler.java

/**
 * Retrieves the current {@link Sql query}, and delegates
 * the flow to the given chain.
 * @param command the command.
 * @param chain the chain.
 * @return {@code false} if everything went fine.
 * @throws QueryJBuildException if the operation fails.
 */
protected boolean handle(@NotNull final QueryJCommand command, @NotNull final CustomQueryChain chain)
        throws QueryJBuildException {
    int t_iIndex = retrieveCurrentSqlIndex(command);

    @Nullable
    final Log t_Log = UniqueLogFactory.getLog(RetrieveQueryHandler.class);

    @NotNull
    final List<Sql<String>> t_lSql = retrieveSqlList(command);

    final int t_iTotalQueries = t_lSql.size();

    @Nullable
    final Chronometer t_Chronometer;

    if ((t_Log != null) && (t_Log.isInfoEnabled())) {
        t_Chronometer = new Chronometer();
        t_Log.info("Validating up to " + t_iTotalQueries + " queries. It can take some time.");
    } else {
        t_Chronometer = null;
    }

    while ((t_iIndex > -1) && (t_iIndex < t_lSql.size())) {
        @NotNull
        final Sql<String> t_Sql = t_lSql.get(t_iIndex);

        setCurrentSql(t_Sql, command);

        if ((t_Log != null) && (t_Log.isDebugEnabled())) {
            t_Log.debug("[" + t_iIndex + "/" + t_iTotalQueries + "] / " + t_Sql.getId());
        }
        setCurrentSqlIndex(t_iIndex++, command);
        chain.process(command);
    }

    if ((t_Log != null) && (t_Chronometer != null)) {
        t_Log.info("Validation took " + t_Chronometer.now());
    }

    return false;
}
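
Note the pattern above: the Chronometer is instantiated only when INFO logging is enabled, so timing overhead is skipped otherwise. A minimal sketch of the same idea using System.nanoTime() (the Chronometer class is project-specific; this helper is illustrative):

import org.apache.commons.logging.Log;

final class TimedLogging {
    // Only pay for timing when the result can actually be logged at INFO.
    static void runTimed(Log log, Runnable work, String label) {
        Long startNanos = (log != null && log.isInfoEnabled()) ? System.nanoTime() : null;
        work.run();
        if (startNanos != null) {
            log.info(label + " took " + ((System.nanoTime() - startNanos) / 1_000_000L) + " msec");
        }
    }
}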

From source file:org.acmsl.queryj.metadata.engines.AbstractJdbcMetadataManager.java

/**
 * {@inheritDoc}
 */
@Override
public void eagerlyFetchMetadata() throws SQLException, QueryJException {
    if (getTables().size() == 0) {
        @Nullable
        final Chronometer t_Chronometer;

        @Nullable
        final Log t_Log = UniqueLogFactory.getLog(AbstractJdbcMetadataManager.class);
        if ((t_Log != null) && (t_Log.isInfoEnabled())) {
            t_Chronometer = new Chronometer();
            t_Log.info("Starting database crawl");
        } else {
            t_Chronometer = null;
        }
        setTables(extractTableMetadata(getTableNames(), getMetaData(), getCatalog(), getSchema(),
                isCaseSensitive(), getMetadataExtractionListener(), MetaLanguageUtils.getInstance()));

        if ((t_Log != null) && (t_Log.isInfoEnabled()) && (t_Chronometer != null)) {
            @NotNull
            final String t_strMessage = "Finished database crawl: " + t_Chronometer.now();
            t_Log.info(t_strMessage);
        }
    }
}

From source file:org.acmsl.queryj.metadata.engines.oracle.OracleMetadataManager.java

/**
 * Builds the table structures.
 * @param tableMap the table map.
 * @param columnMap the column map.
 * @param primaryKeyMap the primary key map.
 * @param foreignKeyMap the foreign key map.
 * @param foreignKeyAttributeMap the foreign key attribute map.
 * @param caseSensitiveness whether it's case sensitive.
 * @param metaLanguageUtils the {@link MetaLanguageUtils} instance.
 */
protected void buildUpTables(@NotNull final Map<String, TableIncompleteValueObject> tableMap,
        @NotNull final Map<String, List<AttributeIncompleteValueObject>> columnMap,
        @NotNull final Map<String, List<AttributeIncompleteValueObject>> primaryKeyMap,
        @NotNull final Map<String, List<ForeignKeyIncompleteValueObject>> foreignKeyMap,
        @NotNull final Map<String, List<AttributeIncompleteValueObject>> foreignKeyAttributeMap,
        final boolean caseSensitiveness, @NotNull final MetaLanguageUtils metaLanguageUtils) {
    @Nullable
    final Log t_Log = UniqueLogFactory.getLog(OracleMetadataManager.class);
    @Nullable
    Chronometer t_Chronometer = null;

    if ((t_Log != null) && (t_Log.isInfoEnabled())) {
        t_Chronometer = new Chronometer();
        t_Log.info("Building up " + tableMap.size() + " tables ...");
    }

    for (@Nullable
    final TableIncompleteValueObject t_Table : tableMap.values()) {
        if (t_Table != null) {
            @Nullable
            final List<AttributeIncompleteValueObject> t_lColumns = columnMap.get(t_Table.getName());

            if (t_lColumns != null) {
                t_Table.setAttributes(toAttributeList(t_lColumns));
            }

            @Nullable
            final List<AttributeIncompleteValueObject> t_lPrimaryKeys = primaryKeyMap.get(t_Table.getName());

            if (t_lPrimaryKeys != null) {
                t_Table.setPrimaryKey(toAttributeList(t_lPrimaryKeys));
            }

            @Nullable
            final List<ForeignKeyIncompleteValueObject> t_lForeignKeys = foreignKeyMap.get(t_Table.getName());

            if (t_lForeignKeys != null) {
                for (@Nullable
                final ForeignKeyIncompleteValueObject t_ForeignKey : t_lForeignKeys) {
                    if (t_ForeignKey != null) {
                        final List<AttributeIncompleteValueObject> t_lForeignKeyAttributes = foreignKeyAttributeMap
                                .get(t_ForeignKey.getFkName());

                        if (t_lForeignKeyAttributes != null) {
                            t_ForeignKey.setAttributes(toAttributeList(t_lForeignKeyAttributes));
                        }
                    }
                }
                t_Table.setForeignKeys(toForeignKeyList(t_lForeignKeys));
            }
        }
    }

    if ((t_Log != null) && (t_Log.isInfoEnabled())) {
        t_Log.info("Table build up phase took " + t_Chronometer.now());

        t_Log.info("Processing table comments ...");
        t_Chronometer = new Chronometer();
    }

    // second round: fix table properties based on the table comments.
    processTableComments(tableMap.values(), metaLanguageUtils);

    if ((t_Log != null) && (t_Log.isInfoEnabled())) {
        t_Log.info("Processing table comments took " + t_Chronometer.now());
        t_Log.info("Building hierarchy relationships ...");
        t_Chronometer = new Chronometer();
    }

    // third round: parent tables
    bindParentChildRelationships(tableMap.values(), caseSensitiveness, metaLanguageUtils);

    if ((t_Log != null) && (t_Log.isInfoEnabled())) {
        t_Log.info("Hierarchy relationships took " + t_Chronometer.now());
        t_Log.info("Binding attributes ...");
        t_Chronometer = new Chronometer();
    }

    bindAttributes(tableMap.values(), columnMap);

    if ((t_Log != null) && (t_Log.isInfoEnabled())) {
        t_Log.info("Attribute binding took " + t_Chronometer.now());
    }
}

From source file:org.alfresco.extension.bulkimport.util.LogUtils.java

public final static boolean info(final Log log) {
    return (log.isInfoEnabled());
}
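
A sketch of how such a helper might be used at a call site, assuming a static import (the message is hypothetical):

import static org.alfresco.extension.bulkimport.util.LogUtils.info;

// ...
if (info(log)) {
    log.info("Bulk import batch committed");
}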

From source file:org.alfresco.web.app.servlet.BaseDownloadContentServlet.java

/**
 * Processes the download request using the current context i.e. no authentication checks are made, it is presumed
 * they have already been done.
 * 
 * @param req
 *           The HTTP request
 * @param res
 *           The HTTP response
 * @param allowLogIn
 *           Indicates whether guest users without access to the content should be redirected to the log in page. If
 *           <code>false</code>, a status 403 forbidden page is displayed instead.
 */
protected void processDownloadRequest(HttpServletRequest req, HttpServletResponse res, boolean allowLogIn,
        boolean transmitContent) throws ServletException, IOException {
    Log logger = getLogger();
    String uri = req.getRequestURI();

    if (logger.isDebugEnabled()) {
        String queryString = req.getQueryString();
        logger.debug("Processing URL: " + uri
                + ((queryString != null && queryString.length() > 0) ? ("?" + queryString) : ""));
    }

    uri = uri.substring(req.getContextPath().length());
    StringTokenizer t = new StringTokenizer(uri, "/");
    int tokenCount = t.countTokens();

    t.nextToken(); // skip servlet name

    // attachment mode (either 'attach' or 'direct')
    String attachToken = t.nextToken();
    boolean attachment = URL_ATTACH.equals(attachToken) || URL_ATTACH_LONG.equals(attachToken);

    ServiceRegistry serviceRegistry = getServiceRegistry(getServletContext());

    // get or calculate the noderef and filename to download as
    NodeRef nodeRef;
    String filename;

    // do we have a path parameter instead of a NodeRef?
    String path = req.getParameter(ARG_PATH);
    if (path != null && path.length() != 0) {
        // process the name based path to resolve the NodeRef and the Filename element
        try {
            PathRefInfo pathInfo = resolveNamePath(getServletContext(), path);
            nodeRef = pathInfo.NodeRef;
            filename = pathInfo.Filename;
        } catch (IllegalArgumentException e) {
            Application.handleSystemError(getServletContext(), req, res, MSG_ERROR_NOT_FOUND,
                    HttpServletResponse.SC_NOT_FOUND, logger);
            return;
        }
    } else {
        // a NodeRef must have been specified if no path has been found
        if (tokenCount < 6) {
            throw new IllegalArgumentException("Download URL did not contain all required args: " + uri);
        }

        // assume 'workspace' or other NodeRef based protocol for remaining URL elements
        StoreRef storeRef = new StoreRef(URLDecoder.decode(t.nextToken()), URLDecoder.decode(t.nextToken()));
        String id = URLDecoder.decode(t.nextToken());

        // build noderef from the appropriate URL elements
        nodeRef = new NodeRef(storeRef, id);

        if (tokenCount > 6) {
            // found additional relative path elements i.e. noderefid/images/file.txt
            // this allows a url to reference siblings nodes via a cm:name based relative path
            // solves the issue with opening HTML content containing relative URLs in HREF or IMG tags etc.
            List<String> paths = new ArrayList<String>(tokenCount - 5);
            while (t.hasMoreTokens()) {
                paths.add(URLDecoder.decode(t.nextToken()));
            }
            filename = paths.get(paths.size() - 1);

            try {
                NodeRef parentRef = serviceRegistry.getNodeService().getPrimaryParent(nodeRef).getParentRef();
                FileInfo fileInfo = serviceRegistry.getFileFolderService().resolveNamePath(parentRef, paths);
                nodeRef = fileInfo.getNodeRef();
            } catch (FileNotFoundException e) {
                Application.handleSystemError(getServletContext(), req, res, MSG_ERROR_NOT_FOUND,
                        HttpServletResponse.SC_NOT_FOUND, logger);
                return;
            }
        } else {
            // filename is last remaining token
            filename = t.nextToken();
        }
    }

    // get the qualified name of the property to get content from - default to ContentModel.PROP_CONTENT
    QName propertyQName = ContentModel.PROP_CONTENT;
    String property = req.getParameter(ARG_PROPERTY);
    if (property != null && property.length() != 0) {
        propertyQName = QName.createQName(property);
    }

    if (logger.isDebugEnabled()) {
        logger.debug("Found NodeRef: " + nodeRef);
        logger.debug("Will use filename: " + filename);
        logger.debug("For property: " + propertyQName);
        logger.debug("With attachment mode: " + attachment);
    }

    // get the services we need to retrieve the content
    NodeService nodeService = serviceRegistry.getNodeService();
    ContentService contentService = serviceRegistry.getContentService();

    // Check that the node still exists
    if (!nodeService.exists(nodeRef)) {
        Application.handleSystemError(getServletContext(), req, res, MSG_ERROR_NOT_FOUND,
                HttpServletResponse.SC_NOT_FOUND, logger);
        return;
    }

    try {
        // check that the user has at least READ_CONTENT access - else redirect to an error or login page
        if (!checkAccess(req, res, nodeRef, PermissionService.READ_CONTENT, allowLogIn)) {
            return;
        }

        // check If-Modified-Since header and set Last-Modified header as appropriate
        Date modified = (Date) nodeService.getProperty(nodeRef, ContentModel.PROP_MODIFIED);
        if (modified != null) {
            long modifiedSince = req.getDateHeader(HEADER_IF_MODIFIED_SINCE);
            if (modifiedSince > 0L) {
                // round the date to ignore the millisecond value, which is not supplied by the header
                long modDate = (modified.getTime() / 1000L) * 1000L;
                if (modDate <= modifiedSince) {
                    if (logger.isDebugEnabled())
                        logger.debug("Returning 304 Not Modified.");
                    res.setStatus(HttpServletResponse.SC_NOT_MODIFIED);
                    return;
                }
            }
            res.setDateHeader(HEADER_LAST_MODIFIED, modified.getTime());
            res.setHeader(HEADER_CACHE_CONTROL, "must-revalidate, max-age=0");
            res.setHeader(HEADER_ETAG, "\"" + Long.toString(modified.getTime()) + "\"");
        }

        if (attachment) {
            setHeaderContentDisposition(req, res, filename);
        }

        // get the content reader
        ContentReader reader = contentService.getReader(nodeRef, propertyQName);
        // ensure that it is safe to use
        reader = FileContentReader.getSafeContentReader(reader,
                Application.getMessage(req.getSession(), MSG_ERROR_CONTENT_MISSING), nodeRef, reader);

        String mimetype = reader.getMimetype();
        // fall back if unable to resolve mimetype property
        if (mimetype == null || mimetype.length() == 0) {
            MimetypeService mimetypeMap = serviceRegistry.getMimetypeService();
            mimetype = MIMETYPE_OCTET_STREAM;
            int extIndex = filename.lastIndexOf('.');
            if (extIndex != -1) {
                String ext = filename.substring(extIndex + 1);
                mimetype = mimetypeMap.getMimetype(ext);
            }
        }

        // explicitly set the content disposition header if the content is powerpoint
        if (!attachment && (mimetype.equals(POWER_POINT_2007_DOCUMENT_MIMETYPE)
                || mimetype.equals(POWER_POINT_DOCUMENT_MIMETYPE))) {
            setHeaderContentDisposition(req, res, filename);
        }

        // get the content and stream directly to the response output stream
        // assuming the repo is capable of streaming in chunks, this should allow large files
        // to be streamed directly to the browser response stream.
        res.setHeader(HEADER_ACCEPT_RANGES, "bytes");

        // for a GET request, transmit the content else just the headers are sent
        if (transmitContent) {
            try {
                boolean processedRange = false;
                String range = req.getHeader(HEADER_CONTENT_RANGE);
                if (range == null) {
                    range = req.getHeader(HEADER_RANGE);
                }
                if (range != null) {
                    if (logger.isDebugEnabled())
                        logger.debug("Found content range header: " + range);

                    // ensure the range header starts with "bytes=" and process the range(s)
                    if (range.length() > 6) {
                        HttpRangeProcessor rangeProcessor = new HttpRangeProcessor(contentService);
                        processedRange = rangeProcessor.processRange(res, reader, range.substring(6), nodeRef,
                                propertyQName, mimetype, req.getHeader(HEADER_USER_AGENT));
                    }
                }
                if (!processedRange) {
                    if (logger.isDebugEnabled())
                        logger.debug("Sending complete file content...");

                    // set mimetype for the content and the character encoding for the stream
                    res.setContentType(mimetype);
                    res.setCharacterEncoding(reader.getEncoding());

                    // MNT-10642 Alfresco Explorer has javascript vulnerability opening HTML files
                    if (req.getRequestURI().contains("/d/d/") && (mimetype.equals("text/html")
                            || mimetype.equals("application/xhtml+xml") || mimetype.equals("text/xml"))) {
                        String content = reader.getContentString();

                        if (mimetype.equals("text/html") || mimetype.equals("application/xhtml+xml")) {
                            // process with HTML stripper
                            content = StringUtils.stripUnsafeHTMLTags(content, false);
                        } else if (mimetype.equals("text/xml") && mimetype.equals("text/x-component")) {
                            // IE supports "behaviour" which means that css can load a .htc file that could
                            // contain XSS code in the form of jscript, vbscript etc, to stop it form being
                            // evaluated we set the contient type to text/plain
                            res.setContentType("text/plain");
                        }

                        String encoding = reader.getEncoding();
                        byte[] bytes = encoding != null ? content.getBytes(encoding) : content.getBytes();
                        res.setContentLength(bytes.length);
                        res.getOutputStream().write(bytes);

                        return;
                    }

                    // return the complete entity range
                    long size = reader.getSize();
                    res.setHeader(HEADER_CONTENT_RANGE,
                            "bytes 0-" + Long.toString(size - 1L) + "/" + Long.toString(size));
                    res.setHeader(HEADER_CONTENT_LENGTH, Long.toString(size));
                    reader.getContent(res.getOutputStream());
                }
            } catch (SocketException e1) {
                // the client cut the connection - our mission was accomplished apart from a little error message
                if (logger.isDebugEnabled())
                    logger.debug("Client aborted stream read:\n\tnode: " + nodeRef + "\n\tcontent: " + reader);
            } catch (ContentIOException e2) {
                if (logger.isInfoEnabled())
                    logger.info("Failed stream read:\n\tnode: " + nodeRef + " due to: " + e2.getMessage());
            } catch (Throwable err) {
                if (err.getCause() instanceof SocketException) {
                    // the client cut the connection - our mission was accomplished apart from a little error message
                    if (logger.isDebugEnabled())
                        logger.debug(
                                "Client aborted stream read:\n\tnode: " + nodeRef + "\n\tcontent: " + reader);
                } else
                    throw err;
            }
        } else {
            if (logger.isDebugEnabled())
                logger.debug("HEAD request processed - no content sent.");
            res.getOutputStream().close();
        }
    } catch (Throwable err) {
        throw new AlfrescoRuntimeException(
                "Error during download content servlet processing: " + err.getMessage(), err);
    }
}