Example usage for java.util.logging Level FINER

List of usage examples for java.util.logging Level FINER

Introduction

On this page you can find example usages of java.util.logging Level FINER.

Prototype

Level FINER

Document

FINER indicates a fairly detailed tracing message.
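
Before the project excerpts below, here is a minimal, self-contained sketch (not taken from any of the listed projects) showing how FINER records can be enabled and emitted. By default the JDK's ConsoleHandler only publishes INFO and above, so both the logger and a handler must be lowered to FINER; the class name, logger name, and messages are purely illustrative.

import java.util.logging.ConsoleHandler;
import java.util.logging.Level;
import java.util.logging.Logger;

public class FinerDemo {

    // Illustrative logger; any logger name works.
    private static final Logger LOGGER = Logger.getLogger(FinerDemo.class.getName());

    public static void main(String[] args) {
        // The default ConsoleHandler drops records below INFO, so lower
        // both the logger level and a handler level to see FINER output.
        ConsoleHandler handler = new ConsoleHandler();
        handler.setLevel(Level.FINER);
        LOGGER.setLevel(Level.FINER);
        LOGGER.addHandler(handler);
        LOGGER.setUseParentHandlers(false); // avoid duplicate output through the root handler

        // Guard expensive message construction, as several examples below do.
        if (LOGGER.isLoggable(Level.FINER)) {
            LOGGER.log(Level.FINER, "processing {0} items", new Object[] { 42 });
        }

        // entering()/exiting() emit method-trace records at FINER.
        LOGGER.entering(FinerDemo.class.getName(), "main");
        LOGGER.exiting(FinerDemo.class.getName(), "main");
    }
}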

Usage

From source file:com.granule.json.utils.internal.JSONSAXHandler.java

public void characters(char[] ch, int start, int length) throws SAXException {
    if (logger.isLoggable(Level.FINER))
        logger.entering(className, "characters(char[], int, int)");

    String str = new String(ch, start, length);
    if (this.current.getTagText() != null) {
        str = this.current.getTagText() + str;
    }
    this.current.setTagText(str);

    if (logger.isLoggable(Level.FINER))
        logger.exiting(className, "characters(char[], int, int)");
}

From source file:com.github.mike10004.jenkinsbbhook.WebhookHandler.java

/**
 * POST method for handling a webhook.
 * @param content representation for the resource
 * @return an HTTP response with content of the updated or created resource.
 */
@POST
@Consumes("application/json")
@Produces("application/json")
public String relayBuildRequest(@Context HttpServletRequest request, @QueryParam("job") String jobName,
        @QueryParam("project_token") String projectToken, @QueryParam("username") String username,
        @QueryParam("api_token") String apiToken, String content) throws IOException {
    if (!"repo:push".equals(request.getHeader("X-Event-Key"))) {
        throw new BadRequestException("unhandled event key header value");
    }
    Push push;
    try {
        push = gson.fromJson(content, Push.class);
    } catch (JsonSyntaxException e) {
        throw new BadRequestException("malformed json in request body", e);
    }
    jobName = Strings.nullToEmpty(jobName);
    projectToken = Strings.nullToEmpty(projectToken);
    username = Strings.nullToEmpty(username);
    apiToken = Strings.nullToEmpty(apiToken);
    log.log(Level.INFO, "jobName={0}; projectToken.length={1}; username={2}, apiToken.length={3}; {4}",
            new Object[] { jobName, projectToken.length(), username, apiToken.length(), push });
    if (projectToken.isEmpty()) {
        throw new BadRequestException("project_token query parameter absent or empty");
    }
    if (jobName.isEmpty()) {
        throw new BadRequestException("could not derive job name from path segment");
    }
    if (apiToken.isEmpty()) {
        throw new BadRequestException("api_token query parameter absent or empty");
    }
    if (username.isEmpty()) {
        throw new BadRequestException("username query parameter absent or empty");
    }
    AppParams appParams = loadAppParams();
    if (appParams.getJenkinsBaseUrl() == null) {
        throw new InternalServerErrorException("server not configured: init param "
                + ContextAppParams.PARAM_JENKINS_BASE_URL + " is required");
    }
    PostRequestRelayer relayer = constructRelayer(appParams);
    CrumbData crumbData = relayer.fetchCrumbData(appParams, username, apiToken);
    log.log(Level.FINER, "crumb data: {0}", crumbData);
    URI postedUri = relayer.sendBuildPostRequest(appParams, crumbData, jobName, projectToken);
    ResponseData responseData = new ResponseData(appParams.getJenkinsBaseUrl(), crumbData, jobName,
            postedUri.toString(), appParams.isSimulation());
    return gson.toJson(responseData);
}

From source file:hudson.tasks.Mailer.java

@Override
public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener)
        throws IOException, InterruptedException {
    if (debug)
        listener.getLogger().println("Running mailer");
    // substitute build parameters
    EnvVars env = build.getEnvironment(listener);
    String recip = env.expand(recipients);

    return new MailSender(recip, dontNotifyEveryUnstableBuild, sendToIndividuals, descriptor().getCharset()) {
        /** Check whether a path (/-separated) will be archived. */
        @Override
        public boolean artifactMatches(String path, AbstractBuild<?, ?> build) {
            ArtifactArchiver aa = build.getProject().getPublishersList().get(ArtifactArchiver.class);
            if (aa == null) {
                LOGGER.finer("No ArtifactArchiver found");
                return false;
            }
            String artifacts = aa.getArtifacts();
            for (String include : artifacts.split("[, ]+")) {
                String pattern = include.replace(File.separatorChar, '/');
                if (pattern.endsWith("/")) {
                    pattern += "**";
                }
                if (SelectorUtils.matchPath(pattern, path)) {
                    LOGGER.log(Level.FINER, "DescriptorImpl.artifactMatches true for {0} against {1}",
                            new Object[] { path, pattern });
                    return true;
                }
            }
            LOGGER.log(Level.FINER, "DescriptorImpl.artifactMatches for {0} matched none of {1}",
                    new Object[] { path, artifacts });
            return false;
        }
    }.execute(build, listener);
}

From source file:com.ibm.jaggr.core.util.ZipUtil.java

/**
 * Extracts the directory entry to the location specified by {@code dir}
 *
 * @param entry
 *            the {@link ZipEntry} object for the directory being extracted
 * @param dir
 *            the {@link File} object for the target directory
 *
 * @throws IOException
 */
private static void extractDirectory(ZipEntry entry, File dir) throws IOException {
    final String sourceMethod = "extractFile"; //$NON-NLS-1$
    final boolean isTraceLogging = log.isLoggable(Level.FINER);
    if (isTraceLogging) {
        log.entering(sourceClass, sourceMethod, new Object[] { entry, dir });
    }

    dir.mkdir(); // May fail if the directory has already been created
    if (!dir.setLastModified(entry.getTime())) {
        throw new IOException("Failed to set last modified time for " + dir.getAbsolutePath()); //$NON-NLS-1$
    }

    if (isTraceLogging) {
        log.exiting(sourceClass, sourceMethod);
    }
}

From source file:com.esri.geoportal.commons.csw.client.impl.CapabilitiesParse.java

@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
    if (localName.equals("")) {
        localName = qName;
    }

    LOG.finer("TEXT IN ELEMENT " + StringEscapeUtils.escapeJava(text.toString()));
    LOG.finer("ELEMENT END " + StringEscapeUtils.escapeJava(localName));
    try {
        text = new StringBuffer(chkStr(text.toString()));
        if (capabilities && getRecords && post && constraint && localName.equalsIgnoreCase("Value")
                && text.toString().equalsIgnoreCase("XML") && getRecordsList.peek() != null) {
            cap.set_getRecordsPostURL(getRecordsList.pop().toString());
        } else if (capabilities && getRecords && post && constraint && localName.equalsIgnoreCase("Value")
                && !text.toString().equalsIgnoreCase("XML") && getRecordsList.peek() != null) {
            getRecordsList.pop();
        }

        if (capabilities && getRecords && localName.equalsIgnoreCase("HTTP") && getRecordsList.peek() != null) {
            String tmp = chkStr(cap.get_getRecordsPostURL());
            if ("".equals(tmp)) {
                cap.set_getRecordsPostURL(getRecordsList.pop().toString());
            }
        }
        if (capabilities && localName.equalsIgnoreCase("Abstract")) {
            cap.setAbstractText(text.toString());
        }
        if (capabilities && localName.equalsIgnoreCase("Title")) {
            cap.setTitle(text.toString());
        }
    } catch (Throwable e) {
        LOG.log(Level.FINER, "Error while getting getrecords url", e);
    }
    tracking(localName, false);

}

From source file:Peer.java

@Override
public Key getSuccessor(Key key) throws Exception {
    lg.log(Level.FINEST, "getSuccessor Entry");

    lg.log(Level.FINER, "getSuccessor Calling succ:" + succ + " from peer:" + nodeid + " with key:" + key);

    // Ensure this peer's successor is up to date
    succ = superpeer.getSuccessor(nodeid);

    // If we have no successor, this peer is the successor
    if (succ == null) {
        return nodeid;
    }

    // Ensure this peer's predecessor is up to date
    pred = superpeer.getPredecessor(nodeid);

    // Get the max key value 
    Key max = new Key(BigInteger.valueOf((int) Math.pow(2, hasher.getBitSize()))).pred();

    // If this peer knows which peer the key belongs to ...
    if (
    // Normal increasing range case
    (nodeid.compare(key) < 0 && key.compare(succ) <= 0)
            // Modulo case
            || (pred.compare(nodeid) > 0 && (key.compare(pred) > 0 && key.compare(max) <= 0)
                    || (key.compare(nodeid) <= 0))) {
        lg.log(Level.FINER, "getSuccessor - Known successor.");
        lg.log(Level.FINEST, "getSuccesssor Exit");
        return succ;
    }
    // ... else ask this peer's successor
    else {
        lg.log(Level.FINER, "getSuccessor - Unknown successor.");
        try {
            lg.log(Level.FINEST, "getSuccesssor Exit");
            return getPeer(succ).getSuccessor(key);
        } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }

    lg.log(Level.WARNING, "getSuccessor returning null");
    lg.log(Level.FINEST, "getSuccesssor Exit");
    return null;
}

From source file:org.geoserver.bkprst.BrTask.java

/**
 * Writes an XML file containing data about the current backup.
 *
 * @param path the directory to write the info in
 * 
 * @return true on success, false otherwise 
 */
protected boolean writeBackupInfo(String path) {

    File xmlFile = new File(path + File.separatorChar + BrTask.INFOFILE);
    try {
        FileUtils.writeStringToFile(xmlFile, this.br.toXML(this));
    } catch (IOException e) {
        LOGGER.log(Level.FINER, e.getMessage(), e);
        return false;
    }
    return true;
}

From source file:SuperPeer.java

@Override
public synchronized String getAddress(Key id) throws Exception {
    lg.log(Level.FINEST, "getAddress Entry.");
    PeerInfo fe = null;
    Iterator<PeerInfo> it = peertable.iterator();
    while (it.hasNext()) {
        fe = it.next();
        lg.log(Level.FINER, "getAddress - Checking " + fe.getNodeId().toString() + " for match ...");
        if (fe.getNodeId().equals(id)) {
            lg.log(Level.FINER, "getAddress - match found.");
            lg.log(Level.FINEST, "getAddress Exit.");
            return fe.getIP();
        }
    }

    lg.log(Level.WARNING, "getAddress failed on " + id.toString() + ", returning null!");
    lg.log(Level.FINEST, "getAddress Entry.");
    return null;
}

From source file:com.ibm.jaggr.core.impl.modulebuilder.less.LessModuleBuilder.java

protected String processLess(String filename, String css) throws IOException {
    final String sourceMethod = "processLess"; //$NON-NLS-1$
    final boolean isTraceLogging = log.isLoggable(Level.FINER);
    if (isTraceLogging) {
        log.entering(sourceClass, sourceMethod, new Object[] { filename, css });
    }
    Context cx = Context.enter();
    Scriptable threadScope = null;
    try {
        threadScope = getThreadScopes().poll(SCOPE_POOL_TIMEOUT_SECONDS, TimeUnit.SECONDS);
        if (threadScope == null) {
            throw new TimeoutException("Timeout waiting for thread scope"); //$NON-NLS-1$
        }
        Scriptable scope = cx.newObject(threadScope);
        scope.setParentScope(threadScope);
        Scriptable options = cx.newObject(threadScope);
        options.put("filename", options, filename); //$NON-NLS-1$
        Function compiler = (Function) threadScope.get(LESS_COMPILER_VAR, threadScope);
        css = compiler.call(cx, scope, null, new Object[] { css, options }).toString();

    } catch (JavaScriptException e) {
        // Add module info
        String message = "Error parsing " + filename + "\r\n" + e.getMessage(); //$NON-NLS-1$ //$NON-NLS-2$
        throw new IOException(message, e);
    } catch (InterruptedException e) {
        throw new IOException(e);
    } catch (TimeoutException e) {
        throw new RuntimeException(e);
    } finally {
        if (threadScope != null) {
            getThreadScopes().add(threadScope);
        }
        Context.exit();
    }
    if (isTraceLogging) {
        log.exiting(sourceClass, sourceMethod, css);
    }
    return css;
}

From source file:org.b3log.solo.processor.StatProcessor.java

/**
 * Increments blog/article view counters.
 *
 * @param context the specified context
 */
@RequestProcessing(value = "/console/stat/viewcnt", method = HTTPRequestMethod.GET)
public void viewCounter(final HTTPRequestContext context) {
    LOGGER.log(Level.INFO, "Sync statistic from memcache to repository");

    context.setRenderer(new DoNothingRenderer());

    final JSONObject statistic = (JSONObject) statisticRepository.getCache()
            .get(Statistics.REPOSITORY_CACHE_KEY_PREFIX + Statistic.STATISTIC);
    if (null == statistic) {
        LOGGER.log(Level.INFO, "Not found statistic in memcache, ignores sync");

        return;
    }

    final Transaction transaction = statisticRepository.beginTransaction();
    transaction.clearQueryCache(false);
    try {
        // For blog view counter
        statisticRepository.update(Statistic.STATISTIC, statistic);

        // For article view counter
        final Set<String> keys = PageCaches.getKeys();
        final List<String> keyList = new ArrayList<String>(keys);

        final int size = keys.size() > FLUSH_SIZE ? FLUSH_SIZE : keys.size(); // Flush FLUSH_SIZE articles at most
        final List<Integer> idx = CollectionUtils.getRandomIntegers(0, keys.size(), size);

        final Set<String> cachedPageKeys = new HashSet<String>();
        for (final Integer i : idx) {
            cachedPageKeys.add(keyList.get(i));
        }

        for (final String cachedPageKey : cachedPageKeys) {
            final JSONObject cachedPage = PageCaches.get(cachedPageKey);
            if (null == cachedPage) {
                continue;
            }

            final Map<String, String> langs = langPropsService.getAll(Latkes.getLocale());
            if (!cachedPage.optString(PageCaches.CACHED_TYPE)
                    .equals(langs.get(PageTypes.ARTICLE.getLangeLabel()))) { // Cached is not an article page
                continue;
            }

            final int hitCount = cachedPage.optInt(PageCaches.CACHED_HIT_COUNT);
            final String articleId = cachedPage.optString(PageCaches.CACHED_OID);

            final JSONObject article = articleRepository.get(articleId);
            if (null == article) {
                continue;
            }

            LOGGER.log(Level.FINER, "Updating article[id={0}, title={1}] view count",
                    new Object[] { articleId, cachedPage.optString(PageCaches.CACHED_TITLE) });

            final int oldViewCount = article.optInt(Article.ARTICLE_VIEW_COUNT);
            final int viewCount = oldViewCount + hitCount;

            article.put(Article.ARTICLE_VIEW_COUNT, viewCount);

            article.put(Article.ARTICLE_RANDOM_DOUBLE, Math.random()); // Updates random value

            articleRepository.update(articleId, article);

            cachedPage.put(PageCaches.CACHED_HIT_COUNT, 0);

            LOGGER.log(Level.FINER, "Updating article[id={0}, title={1}] view count from [{2}] to [{3}]",
                    new Object[] { articleId, article.optString(Article.ARTICLE_TITLE), oldViewCount,
                            viewCount });
        }

        transaction.commit();

        LOGGER.log(Level.INFO, "Synchronized statistic from cache to repository[statistic={0}]", statistic);
    } catch (final RepositoryException e) {
        if (transaction.isActive()) {
            transaction.rollback();
        }

        LOGGER.log(Level.SEVERE, "Updates statistic failed", e);
    }
}