Example usage for java.util.logging Level FINER

List of usage examples for java.util.logging Level FINER

Introduction

This page presents example usages of java.util.logging Level FINER.

Prototype

Level FINER

Document

FINER indicates a fairly detailed tracing message.
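
The project examples below repeatedly use two FINER idioms: passing a parameterized message with an Object[] of arguments to LOGGER.log, and guarding expensive message construction with isLoggable(Level.FINER). Here is a minimal, self-contained sketch of both patterns; the class name, logger, and message values are illustrative only.

import java.util.logging.Level;
import java.util.logging.Logger;

public class FinerLoggingExample {

    private static final Logger LOGGER = Logger.getLogger(FinerLoggingExample.class.getName());

    public static void main(String[] args) {
        String user = "alice";
        int page = 2;

        // Parameterized message: the {0}/{1} placeholders are formatted only
        // if FINER is actually enabled for this logger.
        LOGGER.log(Level.FINER, "Request archive page[user={0}, page={1}]",
                new Object[] { user, page });

        // Guard expensive message construction explicitly, as some of the
        // examples below do, before building large trace strings.
        if (LOGGER.isLoggable(Level.FINER)) {
            LOGGER.finer("Detailed trace: " + describe(user, page));
        }
    }

    private static String describe(String user, int page) {
        return "user=" + user + ", page=" + page;
    }
}

Note that the JDK's default ConsoleHandler level is INFO, so FINER output is discarded unless both the logger and its handler are configured to a finer level (for example via a logging.properties file).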

Usage

From source file:org.b3log.solo.processor.ArticleProcessor.java

/**
 * Shows archive articles with the specified context.
 *
 * @param context the specified context
 * @param request the specified request
 * @param response the specified response 
 */
@RequestProcessing(value = "/archives/**", method = HTTPRequestMethod.GET)
public void showArchiveArticles(final HTTPRequestContext context, final HttpServletRequest request,
        final HttpServletResponse response) {
    final AbstractFreeMarkerRenderer renderer = new FrontRenderer();
    context.setRenderer(renderer);

    renderer.setTemplateName("archive-articles.ftl");

    try {
        String requestURI = request.getRequestURI();
        if (!requestURI.endsWith("/")) {
            requestURI += "/";
        }

        final String archiveDateString = getArchiveDate(requestURI);
        final int currentPageNum = getArchiveCurrentPageNum(requestURI);
        if (-1 == currentPageNum) {
            response.sendError(HttpServletResponse.SC_NOT_FOUND);
            return;
        }

        LOGGER.log(Level.FINER, "Request archive date[string={0}, currentPageNum={1}]",
                new Object[] { archiveDateString, currentPageNum });
        final JSONObject result = archiveDateQueryService.getByArchiveDateString(archiveDateString);
        if (null == result) {
            LOGGER.log(Level.WARNING, "Can not find articles for the specified archive date[string={0}]",
                    archiveDateString);
            response.sendError(HttpServletResponse.SC_NOT_FOUND);
            return;
        }

        final JSONObject archiveDate = result.getJSONObject(ArchiveDate.ARCHIVE_DATE);
        final String archiveDateId = archiveDate.getString(Keys.OBJECT_ID);

        final JSONObject preference = preferenceQueryService.getPreference();
        final int pageSize = preference.getInt(Preference.ARTICLE_LIST_DISPLAY_COUNT);

        final int articleCount = archiveDate.getInt(ArchiveDate.ARCHIVE_DATE_PUBLISHED_ARTICLE_COUNT);
        final int pageCount = (int) Math.ceil((double) articleCount / (double) pageSize);

        final List<JSONObject> articles = articleQueryService.getArticlesByArchiveDate(archiveDateId,
                currentPageNum, pageSize);
        if (articles.isEmpty()) {
            try {
                response.sendError(HttpServletResponse.SC_NOT_FOUND);
                return;
            } catch (final IOException ex) {
                LOGGER.severe(ex.getMessage());
            }
        }

        final boolean hasMultipleUsers = Users.getInstance().hasMultipleUsers();
        if (hasMultipleUsers) {
            filler.setArticlesExProperties(articles, preference);
        } else {
            if (!articles.isEmpty()) {
                final JSONObject author = articleUtils.getAuthor(articles.get(0));
                filler.setArticlesExProperties(articles, author, preference);
            }
        }

        sort(preference, articles);

        final Map<String, Object> dataModel = renderer.getDataModel();

        Skins.fillSkinLangs(preference.optString(Preference.LOCALE_STRING),
                (String) request.getAttribute(Keys.TEMAPLTE_DIR_NAME), dataModel);

        final String cachedTitle = prepareShowArchiveArticles(preference, dataModel, articles, currentPageNum,
                pageCount, archiveDateString, archiveDate);

        dataModel.put(Keys.PAGE_TYPE, PageTypes.DATE_ARTICLES);
        filler.fillBlogHeader(request, dataModel, preference);
        filler.fillSide(request, dataModel, preference);

        final Map<String, String> langs = langPropsService.getAll(Latkes.getLocale());
        request.setAttribute(PageCaches.CACHED_TYPE, langs.get(PageTypes.DATE_ARTICLES.getLangeLabel()));
        request.setAttribute(PageCaches.CACHED_OID, archiveDateId);
        request.setAttribute(PageCaches.CACHED_TITLE,
                cachedTitle + "  [" + langs.get("pageNumLabel") + "=" + currentPageNum + "]");
        request.setAttribute(PageCaches.CACHED_LINK, requestURI);
    } catch (final Exception e) {
        LOGGER.log(Level.SEVERE, e.getMessage(), e);

        try {
            response.sendError(HttpServletResponse.SC_NOT_FOUND);
        } catch (final IOException ex) {
            LOGGER.severe(ex.getMessage());
        }
    }
}

From source file:org.jenkinsci.plugins.pipeline.maven.WithMavenStepExecution.java

/**
 * Generates the content of the maven wrapper script
 *
 * @param mvnExec maven executable location
 * @return wrapper script content
 * @throws AbortException when problems creating content
 */
private String generateMavenWrapperScriptContent(FilePath mvnExec) throws AbortException {

    boolean isUnix = Boolean.TRUE.equals(getComputer().isUnix());

    StringBuilder script = new StringBuilder();

    if (isUnix) { // Linux, Unix, MacOSX
        String lineSep = "\n";
        script.append("#!/bin/sh -e").append(lineSep);
        script.append("echo ----- withMaven Wrapper script -----").append(lineSep);
        script.append(mvnExec.getRemote() + " $MAVEN_CONFIG \"$@\"").append(lineSep);

    } else { // Windows
        String lineSep = "\r\n";
        script.append("@echo off").append(lineSep);
        script.append("echo ----- withMaven Wrapper script -----").append(lineSep);
        script.append(mvnExec.getRemote() + " %MAVEN_CONFIG% %*").append(lineSep);
    }

    LOGGER.log(Level.FINER, "Generated Maven wrapper script: \n{0}", script);
    return script.toString();
}

From source file:com.kenai.redminenb.repository.RedmineRepository.java

private void setupIssueRefreshTask() {
    if (refreshIssuesTask == null) {
        refreshIssuesTask = getRequestProcessor().create(new Runnable() {
            @Override
            public void run() {
                Set<String> ids;
                synchronized (issuesToRefresh) {
                    ids = new HashSet<String>(issuesToRefresh);
                }
                if (ids.isEmpty()) {
                    Redmine.LOG.log(Level.FINE, "no issues to refresh {0}", getDisplayName()); // NOI18N
                    return;
                }
                Redmine.LOG.log(Level.FINER, "preparing to refresh issue {0} - {1}",
                        new Object[] { getDisplayName(), ids }); // NOI18N
                scheduleIssueRefresh();
            }
        });
        scheduleIssueRefresh();
    }
}

From source file:de.duenndns.ssl.MemorizingTrustManager.java

int interact(final String message, final int titleId) {
    /* prepare the MTMDecision blocker object */
    MTMDecision choice = new MTMDecision();
    final int myId = createDecisionId(choice);

    masterHandler.post(new Runnable() {
        public void run() {
            Intent ni = new Intent(master, MemorizingActivity.class);
            ni.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            ni.setData(Uri.parse(MemorizingTrustManager.class.getName() + "/" + myId));
            ni.putExtra(DECISION_INTENT_ID, myId);
            ni.putExtra(DECISION_INTENT_CERT, message);
            ni.putExtra(DECISION_TITLE_ID, titleId);

            // we try to directly start the activity and fall back to
            // making a notification
            try {
                getUI().startActivity(ni);
            } catch (Exception e) {
                LOGGER.log(Level.FINE, "startActivity(MemorizingActivity)", e);
            }
        }
    });

    LOGGER.log(Level.FINE, "openDecisions: " + openDecisions + ", waiting on " + myId);
    try {
        synchronized (choice) {
            choice.wait();
        }
    } catch (InterruptedException e) {
        LOGGER.log(Level.FINER, "InterruptedException", e);
    }
    LOGGER.log(Level.FINE, "finished wait on " + myId + ": " + choice.state);
    return choice.state;
}

From source file:com.kenai.redminenb.repository.RedmineRepository.java

private void setupQueryRefreshTask() {
    if (refreshQueryTask == null) {
        refreshQueryTask = getRequestProcessor().create(new Runnable() {
            @Override
            public void run() {
                try {
                    Set<RedmineQuery> queries;
                    synchronized (refreshQueryTask) {
                        queries = new HashSet<RedmineQuery>(queriesToRefresh);
                    }
                    if (queries.isEmpty()) {
                        Redmine.LOG.log(Level.FINE, "no queries to refresh {0}",
                                new Object[] { getDisplayName() }); // NOI18N
                        return;
                    }
                    for (RedmineQuery q : queries) {
                        Redmine.LOG.log(Level.FINER, "preparing to refresh query {0} - {1}",
                                new Object[] { q.getDisplayName(), getDisplayName() }); // NOI18N
                        RedmineQueryController qc = q.getController();
                        qc.autoRefresh();
                    }
                } finally {
                    scheduleQueryRefresh();
                }
            }
        });
        scheduleQueryRefresh();
    }
}

From source file:hudson.plugins.active_directory.ActiveDirectoryUnixAuthenticationProvider.java

/**
 * Resolves all the groups that the user is in.
 *
 * We now use <a href="http://msdn.microsoft.com/en-us/library/windows/desktop/ms680275(v=vs.85).aspx">tokenGroups</a>
 * attribute, which is a computed attribute that lists all the SIDs of the groups that the user is directly/indirectly in.
 * We then use that to retrieve all the groups in one query and resolve their canonical names.
 *
 * @param userDN
 *      User's distinguished name.
 * @param context Used for making queries.
 */
private Set<GrantedAuthority> resolveGroups(String domainDN, String userDN, DirContext context)
        throws NamingException {
    if (userDN.contains("/")) {
        userDN = userDN.replace("/", "\\/");
    }
    Set<GrantedAuthority> groups = new HashSet<GrantedAuthority>();

    LOGGER.log(Level.FINER, "Looking up group of {0}", userDN);
    Attributes id = context.getAttributes(userDN, new String[] { "tokenGroups", "memberOf", "CN" });
    Attribute tga = id.get("tokenGroups");

    if (tga == null) {
        // tga will be null if you are not using a global catalogue
        // or if the user is not actually a member of any security groups.
        LOGGER.log(Level.FINE, "Failed to retrieve tokenGroups for {0}", userDN);
        // keep on trucking as we can still use memberOf for Distribution Groups.
    } else {
        // build up the query to retrieve all the groups
        StringBuilder query = new StringBuilder("(|");
        List<byte[]> sids = new ArrayList<byte[]>();

        NamingEnumeration<?> tokenGroups = tga.getAll();
        while (tokenGroups.hasMore()) {
            byte[] gsid = (byte[]) tokenGroups.next();
            query.append("(objectSid={" + sids.size() + "})");
            sids.add(gsid);
        }
        tokenGroups.close();

        query.append(")");

        NamingEnumeration<SearchResult> renum = new LDAPSearchBuilder(context, domainDN).subTreeScope()
                .returns("cn").search(query.toString(), sids.toArray());
        parseMembers(userDN, groups, renum);
        renum.close();
    }

    {
        /*
         * Stage 2: use memberOf to find groups that aren't picked up by tokenGroups.
         * This includes distribution groups.
         */
        LOGGER.fine("Stage 2: looking up via memberOf");

        while (true) {
            switch (groupLookupStrategy) {
            case TOKENGROUPS:
                // no extra lookup - ever.
                return groups;
            case AUTO:
                // try the accurate one first, and if it's too slow fall back to recursive in the hope that it's faster
                long start = System.nanoTime();
                boolean found = false;
                long duration = 0;
                try {
                    found = chainGroupLookup(domainDN, userDN, context, groups);
                    duration = TimeUnit2.NANOSECONDS.toSeconds(System.nanoTime() - start);
                } catch (TimeLimitExceededException e) {
                    LOGGER.log(Level.WARNING,
                            "The LDAP request did not terminate within the specified time limit. AD will fall back to recursive lookup",
                            e);
                } catch (NamingException e) {
                    if (e.getMessage().contains("LDAP response read timed out")) {
                        LOGGER.log(Level.WARNING,
                                "LDAP response read time out. AD will fall back to recursive lookup", e);
                    } else {
                        throw e;
                    }
                }
                if (!found && duration >= 10) {
                    LOGGER.log(Level.WARNING,
                            "Group lookup via Active Directory's 'LDAP_MATCHING_RULE_IN_CHAIN' extension timed out after {0} seconds. Falling back to recursive group lookup strategy for this and future queries",
                            duration);
                    groupLookupStrategy = GroupLookupStrategy.RECURSIVE;
                    continue;
                } else if (found && duration >= 10) {
                    LOGGER.log(Level.WARNING,
                            "Group lookup via Active Directory's 'LDAP_MATCHING_RULE_IN_CHAIN' extension matched user's groups but took {0} seconds to run. Switching to recursive lookup for future group lookup queries",
                            duration);
                    groupLookupStrategy = GroupLookupStrategy.RECURSIVE;
                    return groups;
                } else if (!found) {
                    LOGGER.log(Level.WARNING,
                            "Group lookup via Active Directory's 'LDAP_MATCHING_RULE_IN_CHAIN' extension failed. Falling back to recursive group lookup strategy for this and future queries");
                    groupLookupStrategy = GroupLookupStrategy.RECURSIVE;
                    continue;
                } else {
                    // it ran fast enough, so stick with it
                    groupLookupStrategy = GroupLookupStrategy.CHAIN;
                    return groups;
                }
            case RECURSIVE:
                recursiveGroupLookup(context, id, groups);
                return groups;
            case CHAIN:
                chainGroupLookup(domainDN, userDN, context, groups);
                return groups;
            }
        }
    }
}

From source file:org.b3log.solo.service.ArticleMgmtService.java

/**
 * Processes tags for article update.
 *
 * <ul>
 *   <li>Un-tags old article, decrements tag reference count</li>
 *   <li>Removes old article-tag relations</li>
 *   <li>Saves new article-tag relations with tag reference count</li>
 * </ul>
 *
 * @param oldArticle the specified old article
 * @param newArticle the specified new article
 * @throws Exception exception
 */
private void processTagsForArticleUpdate(final JSONObject oldArticle, final JSONObject newArticle)
        throws Exception {
    // TODO: public -> private
    final String oldArticleId = oldArticle.getString(Keys.OBJECT_ID);
    final List<JSONObject> oldTags = tagRepository.getByArticleId(oldArticleId);
    final String tagsString = newArticle.getString(Article.ARTICLE_TAGS_REF);
    String[] tagStrings = tagsString.split(",");
    final List<JSONObject> newTags = new ArrayList<JSONObject>();
    for (int i = 0; i < tagStrings.length; i++) {
        final String tagTitle = tagStrings[i].trim();
        JSONObject newTag = tagRepository.getByTitle(tagTitle);
        if (null == newTag) {
            newTag = new JSONObject();
            newTag.put(Tag.TAG_TITLE, tagTitle);
        }
        newTags.add(newTag);
    }

    final List<JSONObject> tagsDropped = new ArrayList<JSONObject>();
    final List<JSONObject> tagsNeedToAdd = new ArrayList<JSONObject>();
    final List<JSONObject> tagsUnchanged = new ArrayList<JSONObject>();
    for (final JSONObject newTag : newTags) {
        final String newTagTitle = newTag.getString(Tag.TAG_TITLE);
        if (!tagExists(newTagTitle, oldTags)) {
            LOGGER.log(Level.FINER, "Tag need to add[title={0}]", newTagTitle);
            tagsNeedToAdd.add(newTag);
        } else {
            tagsUnchanged.add(newTag);
        }
    }
    for (final JSONObject oldTag : oldTags) {
        final String oldTagTitle = oldTag.getString(Tag.TAG_TITLE);
        if (!tagExists(oldTagTitle, newTags)) {
            LOGGER.log(Level.FINER, "Tag dropped[title={0}]", oldTag);
            tagsDropped.add(oldTag);
        } else {
            tagsUnchanged.remove(oldTag);
        }
    }

    LOGGER.log(Level.FINER, "Tags unchanged[{0}]", tagsUnchanged);
    for (final JSONObject tagUnchanged : tagsUnchanged) {
        final String tagId = tagUnchanged.optString(Keys.OBJECT_ID);
        if (null == tagId) {
            continue; // An unchanged tag always has an id
        }
        final int publishedRefCnt = tagUnchanged.getInt(Tag.TAG_PUBLISHED_REFERENCE_COUNT);
        if (oldArticle.getBoolean(Article.ARTICLE_IS_PUBLISHED)) {
            if (!newArticle.getBoolean(Article.ARTICLE_IS_PUBLISHED)) {
                tagUnchanged.put(Tag.TAG_PUBLISHED_REFERENCE_COUNT, publishedRefCnt - 1);
                tagRepository.update(tagId, tagUnchanged);
            }
        } else {
            if (newArticle.getBoolean(Article.ARTICLE_IS_PUBLISHED)) {
                tagUnchanged.put(Tag.TAG_PUBLISHED_REFERENCE_COUNT, publishedRefCnt + 1);
                tagRepository.update(tagId, tagUnchanged);
            }
        }
    }

    for (final JSONObject tagDropped : tagsDropped) {
        final String tagId = tagDropped.getString(Keys.OBJECT_ID);
        final int refCnt = tagDropped.getInt(Tag.TAG_REFERENCE_COUNT);
        tagDropped.put(Tag.TAG_REFERENCE_COUNT, refCnt - 1);
        final int publishedRefCnt = tagDropped.getInt(Tag.TAG_PUBLISHED_REFERENCE_COUNT);
        if (oldArticle.getBoolean(Article.ARTICLE_IS_PUBLISHED)) {
            tagDropped.put(Tag.TAG_PUBLISHED_REFERENCE_COUNT, publishedRefCnt - 1);
        }

        tagRepository.update(tagId, tagDropped);
    }

    final String[] tagIdsDropped = new String[tagsDropped.size()];
    for (int i = 0; i < tagIdsDropped.length; i++) {
        final JSONObject tag = tagsDropped.get(i);
        final String id = tag.getString(Keys.OBJECT_ID);
        tagIdsDropped[i] = id;
    }

    removeTagArticleRelations(oldArticleId,
            0 == tagIdsDropped.length ? new String[] { "l0y0l" } : tagIdsDropped);

    tagStrings = new String[tagsNeedToAdd.size()];
    for (int i = 0; i < tagStrings.length; i++) {
        final JSONObject tag = tagsNeedToAdd.get(i);
        final String tagTitle = tag.getString(Tag.TAG_TITLE);
        tagStrings[i] = tagTitle;
    }
    final JSONArray tags = tag(tagStrings, newArticle);

    addTagArticleRelation(tags, newArticle);
}

From source file:com.ibm.jaggr.core.impl.layer.LayerImpl.java

@SuppressWarnings("unchecked")
@Override
public long getLastModified(HttpServletRequest request) throws IOException {
    long lastModified = _lastModified;
    IAggregator aggregator = (IAggregator) request.getAttribute(IAggregator.AGGREGATOR_REQATTRNAME);
    IOptions options = aggregator.getOptions();
    // Don't check last modified dates of source files on every request in production mode
    // for performance reasons.  _validateLastModified is a transient that gets initialized
    // to true whenever this object is de-serialized (i.e. on server startup).
    if (lastModified == -1 || _validateLastModified.getAndSet(false) || options.isDevelopmentMode()) {
        // see if we already determined the last modified time for this request
        Object obj = request.getAttribute(LAST_MODIFIED_PROPNAME);
        if (obj == null) {
            // Determine latest last-modified time from source files in moduleList
            ModuleList moduleFiles = getModules(request);
            lastModified = getLastModified(aggregator, moduleFiles);
            // Get last-modified date of config file
            lastModified = Math.max(lastModified, aggregator.getConfig().lastModified());
            List<String> cacheInfoReport = null;
            if (_isReportCacheInfo) {
                cacheInfoReport = (List<String>) request.getAttribute(LAYERCACHEINFO_PROPNAME);
            }
            synchronized (this) {
                if (_lastModified == -1) {
                    // Initialize value of instance property
                    _lastModified = lastModified;
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("update_lastmod1"); //$NON-NLS-1$
                    }
                }
            }
            request.setAttribute(LAST_MODIFIED_PROPNAME, lastModified);
            if (log.isLoggable(Level.FINER)) {
                log.finer("Returning calculated last modified " //$NON-NLS-1$
                        + lastModified + " for layer " + //$NON-NLS-1$
                        request.getAttribute(IHttpTransport.REQUESTEDMODULENAMES_REQATTRNAME).toString());
            }
        } else {
            lastModified = (Long) obj;
            if (log.isLoggable(Level.FINER)) {
                log.finer("Returning last modified " //$NON-NLS-1$
                        + lastModified + " from request for layer " + //$NON-NLS-1$
                        request.getAttribute(IHttpTransport.REQUESTEDMODULENAMES_REQATTRNAME).toString());
            }
        }
    } else {
        if (log.isLoggable(Level.FINER)) {
            log.finer("Returning cached last modified " //$NON-NLS-1$
                    + lastModified + " for layer " + //$NON-NLS-1$
                    request.getAttribute(IHttpTransport.REQUESTEDMODULENAMES_REQATTRNAME).toString());
        }
    }
    return lastModified;
}

From source file:com.ibm.jaggr.core.impl.transport.AbstractHttpTransport.java

/**
 * Returns the dynamic portion of the loader extension javascript for this
 * transport.  This includes all registered extension contributions.
 *
 * @return the dynamic portion of the loader extension javascript
 */
protected String getDynamicLoaderExtensionJavaScript() {
    final String sourceMethod = "getDynamicLoaderExtensionJavaScript"; //$NON-NLS-1$
    boolean isTraceLogging = log.isLoggable(Level.FINER);
    if (isTraceLogging) {
        log.entering(AbstractHttpTransport.class.getName(), sourceMethod);
    }
    StringBuffer sb = new StringBuffer();
    for (String contribution : getExtensionContributions()) {
        sb.append(contribution).append("\r\n"); //$NON-NLS-1$
    }
    String cacheBust = AggregatorUtil.getCacheBust(aggregator);
    if (cacheBust != null && cacheBust.length() > 0) {
        sb.append("if (!require.combo.cacheBust){combo.cacheBust = '") //$NON-NLS-1$
                .append(cacheBust).append("';}\r\n"); //$NON-NLS-1$
    }
    if (moduleIdListHash != null) {
        sb.append("require.combo.reg(null, ["); //$NON-NLS-1$
        for (int i = 0; i < moduleIdListHash.length; i++) {
            sb.append(i == 0 ? "" : ", ").append(((int) moduleIdListHash[i]) & 0xFF); //$NON-NLS-1$ //$NON-NLS-2$
        }
        sb.append("]);\r\n"); //$NON-NLS-1$
    }
    sb.append(clientRegisterSyntheticModules());
    if (isTraceLogging) {
        log.exiting(AbstractHttpTransport.class.getName(), sourceMethod, sb.toString());
    }
    return sb.toString();
}

From source file:edu.umass.cs.reconfiguration.SQLReconfiguratorDB.java

private synchronized boolean putReconfigurationRecordIfNotName(ReconfigurationRecord<NodeIDType> record,
        String rcGroupName, String mergee) {

    // if RC group record, it must match rcGroupName
    if (this.isRCGroupName(record.getName()) && !record.getName().equals(rcGroupName))
        return false;
    // special case because mergee may not be recognized by isRCGroupName
    else if (record.getName().equals(mergee))
        return false;

    // else good to insert and set pending if needed
    this.putReconfigurationRecord(record, rcGroupName);
    if (!record.isReady())
        this.setPending(record.getName(), true, true);
    log.log(Level.FINER, "{0} inserted RC record named {1} to RC group {2}",
            new Object[] { this, record.getName(), rcGroupName });
    return true;
}