Example usage for java.util.logging Level FINEST

Introduction

This page collects usage examples for the java.util.logging Level.FINEST constant.

Prototype

public static final Level FINEST
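
FINEST is the lowest of the seven standard levels: its integer value (300) sits below FINER (400) and FINE (500), so a logger set to FINEST accepts every standard record. A minimal sketch of that ordering (class name is illustrative):

import java.util.logging.Level;

public class LevelOrderDemo {
    public static void main(String[] args) {
        // Standard levels in ascending severity:
        // FINEST(300) < FINER(400) < FINE(500) < CONFIG(700) < INFO(800) < WARNING(900) < SEVERE(1000)
        System.out.println(Level.FINEST.intValue());                          // 300
        System.out.println(Level.FINEST.intValue() < Level.FINER.intValue()); // true
    }
}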


Document

FINEST indicates a highly detailed tracing message.
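
With the JDK's default logging configuration, the ConsoleHandler only publishes records at INFO and above, so FINEST tracing is silently dropped unless both the logger and a handler are lowered. A minimal sketch (logger name and message are illustrative):

import java.util.logging.ConsoleHandler;
import java.util.logging.Level;
import java.util.logging.Logger;

public class FinestTraceDemo {
    private static final Logger LOG = Logger.getLogger(FinestTraceDemo.class.getName());

    public static void main(String[] args) {
        // Lower both the logger and a handler to FINEST, otherwise the record is filtered out.
        ConsoleHandler handler = new ConsoleHandler();
        handler.setLevel(Level.FINEST);
        LOG.addHandler(handler);
        LOG.setLevel(Level.FINEST);
        LOG.setUseParentHandlers(false); // avoid duplicate output through the root handler

        // Guard expensive message construction, as several of the examples below do.
        if (LOG.isLoggable(Level.FINEST)) {
            LOG.log(Level.FINEST, "highly detailed tracing message, state=" + System.nanoTime());
        }
    }
}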

Usage

From source file:org.schemaspy.Config.java

/**
 * Set the level of logging to perform.<p/>
 * The levels in descending order are:
 * <ul>
 * <li><code>severe</code> (highest - least detail)
 * <li><code>warning</code> (default)
 * <li><code>info</code>
 * <li><code>config</code>
 * <li><code>fine</code>
 * <li><code>finer</code>
 * <li><code>finest</code>  (lowest - most detail)
 * </ul>
 *
 * @param logLevel
 */
public void setLogLevel(String logLevel) {
    if (logLevel == null) {
        this.logLevel = Level.WARNING;
        return;
    }

    Map<String, Level> levels = new LinkedHashMap<>();
    levels.put("severe", Level.SEVERE);
    levels.put("warning", Level.WARNING);
    levels.put("info", Level.INFO);
    levels.put("config", Level.CONFIG);
    levels.put("fine", Level.FINE);
    levels.put("finer", Level.FINER);
    levels.put("finest", Level.FINEST);

    this.logLevel = levels.get(logLevel.toLowerCase());
    if (this.logLevel == null) {
        throw new InvalidConfigurationException(
                "Invalid logLevel: '" + logLevel + "'. Must be one of: " + levels.keySet());
    }
}
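
The method builds its own lowercase name-to-Level map so it can accept case-insensitive input and list the valid choices in the error message. When the standard upper-case names (or raw integer values) are acceptable, the JDK's Level.parse is a simpler alternative; a minimal sketch (class name is illustrative):

import java.util.logging.Level;

public class ParseLevelDemo {
    public static void main(String[] args) {
        // Level.parse accepts a standard level name such as "FINEST" or an integer value,
        // and throws IllegalArgumentException for anything else.
        Level level = Level.parse("FINEST");
        System.out.println(level + " = " + level.intValue()); // FINEST = 300
    }
}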

From source file:com.archivas.clienttools.arcutils.impl.adapter.Hcp3AuthNamespaceAdapter.java

private boolean doesNamespaceHavePermissions() throws StorageAdapterException {

    InputStream iStream = null;
    try {
        SAXBuilder builder = new SAXBuilder(false);
        iStream = getInputStream(PROC_PATH, false);
        Document procXML = builder.build(iStream);
        LOG.log(Level.FINEST, "procXML=" + procXML);
        close(); // Done with that stream, close

        Element procElement = procXML.getRootElement();
        return doesNamespaceHavePermissions(procElement, getProfile().getNamespace());

    } catch (IOException e) {
        handleIOExceptionFromRequest(e, "accessing namespace info for", getHost()); // throws an exception
    } catch (JDOMException e) {
        String errMsg = "Exception parsing proc data for archive with profile: " + getProfile();
        LOG.log(Level.WARNING, errMsg, e);
        throw new StorageAdapterLiteralException(errMsg, e);
    } finally {
        RuntimeException e = null;
        try {
            if (iStream != null) {
                iStream.close();
            }
        } catch (IOException io) {
            LOG.log(Level.FINE, "IOException closing during getProc", io);
        } catch (RuntimeException ex) {
            e = ex;
        }

        close();

        if (e != null) {
            throw e;
        }
    }
    // handleIOExceptionFromRequest() should have thrown; if its implementation ever changes and it
    // doesn't, throw here.
    throw new RuntimeException("Error determining if the profile has permissions to the requested namespace");
}

From source file:org.apache.oodt.cas.filemgr.system.XmlRpcFileManagerClient.java

public String ingestProduct(Product product, Metadata metadata, boolean clientTransfer)
        throws VersioningException, XmlRpcException, FileManagerException {
    try {
        // ingest product
        Vector<Object> argList = new Vector<Object>();
        Map<String, Object> productHash = XmlRpcStructFactory.getXmlRpcProduct(product);
        argList.add(productHash);
        argList.add(metadata.getHashTable());
        argList.add(clientTransfer);
        String productId = (String) client.execute("filemgr.ingestProduct", argList);

        if (clientTransfer) {
            LOG.log(Level.FINEST, "File Manager Client: clientTransfer enabled: " + "transfering product ["
                    + product.getProductName() + "]");

            // we need to transfer the product ourselves
            // make sure we have the product ID
            if (productId == null) {
                throw new Exception("Request to ingest product: " + product.getProductName()
                        + " but no product ID returned from File " + "Manager ingest");
            }

            if (dataTransfer == null) {
                throw new Exception("Request to ingest product: [" + product.getProductName()
                        + "] using client transfer, but no " + "dataTransferer specified!");
            }

            product.setProductId(productId);

            if (!Boolean.getBoolean("org.apache.oodt.cas.filemgr.serverside.versioning")) {
                // version the product
                Versioner versioner = GenericFileManagerObjectFactory
                        .getVersionerFromClassName(product.getProductType().getVersioner());
                if (versioner != null) {
                    versioner.createDataStoreReferences(product, metadata);
                }

                // add the newly versioned references to the data store
                try {
                    addProductReferences(product);
                } catch (CatalogException e) {
                    LOG.log(Level.SEVERE,
                            "ingestProduct: RepositoryManagerException "
                                    + "when adding Product References for Product : " + product.getProductName()
                                    + " to RepositoryManager: Message: " + e);
                    throw e;
                }
            } else {
                product.setProductReferences(getProductReferences(product));
            }

            // now transfer the product
            try {
                dataTransfer.transferProduct(product);
                // now update the product's transfer status in the data
                // store
                product.setTransferStatus(Product.STATUS_RECEIVED);

                try {
                    setProductTransferStatus(product);
                } catch (CatalogException e) {
                    LOG.log(Level.SEVERE,
                            "ingestProduct: RepositoryManagerException "
                                    + "when updating product transfer status for Product: "
                                    + product.getProductName() + " Message: " + e);
                    throw e;
                }
            } catch (Exception e) {
                LOG.log(Level.SEVERE, "ingestProduct: DataTransferException when transfering Product: "
                        + product.getProductName() + ": Message: " + e);
                throw new DataTransferException(e);
            }

        }
        return productId;

        // error versioning file
    } catch (VersioningException e) {
        LOG.log(Level.SEVERE, e.getMessage());
        LOG.log(Level.SEVERE,
                "ingestProduct: VersioningException when versioning Product: " + product.getProductName()
                        + " with Versioner " + product.getProductType().getVersioner() + ": Message: " + e);
        throw new VersioningException(e);
    } catch (XmlRpcException e2) {
        LOG.log(Level.SEVERE, "Failed to ingest product [ name:" + product.getProductName() + "] :"
                + e2.getMessage() + " -- rolling back ingest");
        try {
            Vector<Object> argList = new Vector<Object>();
            Map<String, Object> productHash = XmlRpcStructFactory.getXmlRpcProduct(product);
            argList.add(productHash);
            client.execute("filemgr.removeProduct", argList);
        } catch (Exception e1) {
            LOG.log(Level.SEVERE,
                    "Failed to rollback ingest of product [" + product + "] : " + e2.getMessage());
        }
        throw e2;
    } catch (Exception e) {
        LOG.log(Level.SEVERE, "Failed to ingest product [ id: " + product.getProductId() + "/ name:"
                + product.getProductName() + "] :" + e + " -- rolling back ingest");
        try {
            Vector<Object> argList = new Vector<Object>();
            Map<String, Object> productHash = XmlRpcStructFactory.getXmlRpcProduct(product);
            argList.add(productHash);
            client.execute("filemgr.removeProduct", argList);
        } catch (Exception e1) {
            LOG.log(Level.SEVERE, "Failed to rollback ingest of product [" + product + "] : " + e);
        }
        throw new FileManagerException("Failed to ingest product [" + product + "] : " + e);
    }

}

From source file:org.apache.myfaces.application.ApplicationImpl.java

@Override
public final void addConverter(final Class<?> targetClass, final String converterClass) {
    checkNull(targetClass, "targetClass");
    checkNull(converterClass, "converterClass");
    checkEmpty(converterClass, "converterClass");

    try {
        if (isLazyLoadConfigObjects()) {
            _converterTargetClassToConverterClassMap.put(targetClass, converterClass);
        } else {
            _converterTargetClassToConverterClassMap.put(targetClass,
                    ClassUtils.simpleClassForName(converterClass));
        }

        if (log.isLoggable(Level.FINEST)) {
            log.finest("add Converter for class = " + targetClass + " converterClass = " + converterClass);
        }
    } catch (Exception e) {
        log.log(Level.SEVERE, "Converter class " + converterClass + " not found", e);
    }
}

From source file:io.hops.hopsworks.common.security.CertificateMaterializer.java

private void scheduleFileRemover(MaterialKey key, String materializationDirectory) {
    LocalFileRemover fileRemover = new LocalFileRemover(key, materialCache.get(key), materializationDirectory);
    fileRemover.scheduledFuture = scheduler.schedule(fileRemover, DELAY_VALUE, DELAY_TIMEUNIT);

    Map<String, LocalFileRemover> materialRemovesForKey = fileRemovers.get(key);
    if (materialRemovesForKey != null) {
        materialRemovesForKey.put(materializationDirectory, fileRemover);
    } else {
        materialRemovesForKey = new HashMap<>();
        materialRemovesForKey.put(materializationDirectory, fileRemover);
        fileRemovers.put(key, materialRemovesForKey);
    }

    LOG.log(Level.FINEST, "Scheduled local file removal for <" + key.getExtendedUsername() + ">");
}

From source file:com.archivas.clienttools.arcutils.impl.adapter.HCAPAdapter.java

protected InputStream getInputStream(final String path, boolean resolvePath) throws StorageAdapterException {
    LOG.log(Level.FINEST, "getHost()=" + getHost() + ", profile=" + getProfile().toDetailString());
    return getInputStream(getHost(), path, resolvePath);
}

From source file:org.apache.myfaces.application.ApplicationImpl.java

@Override
public final void addValidator(final String validatorId, final String validatorClass) {
    checkNull(validatorId, "validatorId");
    checkEmpty(validatorId, "validatorId");
    checkNull(validatorClass, "validatorClass");
    checkEmpty(validatorClass, "validatorClass");

    try {
        if (isLazyLoadConfigObjects()) {
            _validatorClassMap.put(validatorId, validatorClass);
        } else {
            _validatorClassMap.put(validatorId, ClassUtils.simpleClassForName(validatorClass));
        }

        if (log.isLoggable(Level.FINEST)) {
            log.finest("add Validator id = " + validatorId + " class = " + validatorClass);
        }
    } catch (Exception e) {
        log.log(Level.SEVERE, "Validator class " + validatorClass + " not found", e);
    }
}

From source file:com.archivas.clienttools.arcutils.impl.adapter.HCAPAdapter.java

protected InputStream getInputStream(final String path, final String query, Header contentTypeHeader)
        throws StorageAdapterException {
    LOG.log(Level.FINEST, "getHost()=" + getHost() + ", profile=" + getProfile().toDetailString());
    return getInputStream(getHost(), path, query, true, contentTypeHeader);
}

From source file:com.ibm.sbt.services.client.ClientService.java

/**
 * Execute an XML HTTP request with the specified arguments.
 *
 * @param method the HTTP method name (GET, POST, PUT, DELETE)
 * @param args the request arguments
 * @param content the request content
 * @return the response, or null if a listener cancelled the request
 * @throws ClientServicesException
 */
public Response xhr(String method, Args args, Object content) throws ClientServicesException {
    if (logger.isLoggable(Level.FINEST)) {
        logger.entering(sourceClass, "xhr", new Object[] { method, args });
    }

    // notify listener
    if (!notifyListener(method, args, content)) {
        return null;
    }

    checkAuthentication(args);
    checkUrl(args);
    checkReadParameters(args.parameters);
    String url = composeRequestUrl(args);
    Response response = null;
    if (StringUtil.equalsIgnoreCase(method, METHOD_GET)) {
        HttpGet httpGet = new HttpGet(url);
        response = execRequest(httpGet, args, content);
    } else if (StringUtil.equalsIgnoreCase(method, METHOD_POST)) {
        HttpPost httpPost = new HttpPost(url);
        response = execRequest(httpPost, args, content);
    } else if (StringUtil.equalsIgnoreCase(method, METHOD_PUT)) {
        HttpPut httpPut = new HttpPut(url);
        response = execRequest(httpPut, args, content);
    } else if (StringUtil.equalsIgnoreCase(method, METHOD_DELETE)) {
        HttpDelete httpDelete = new HttpDelete(url);
        response = execRequest(httpDelete, args, content);
    } else if (StringUtil.equalsIgnoreCase(method, METHOD_DELETE_BODY)) {
        HttpDeleteWithBody httpDelete = new HttpDeleteWithBody(url);
        response = execRequest(httpDelete, args, content);
    } else {
        throw new ClientServicesException(null, "Unsupported HTTP method {0}", method);
    }

    // notify listener
    response = notifyListener(method, args, content, response);

    if (logger.isLoggable(Level.FINEST)) {
        logger.exiting(sourceClass, "xhr", response);
    }
    return response;
}
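
Note that Logger.entering and Logger.exiting emit their records at Level.FINER, so the FINEST guards above are slightly stricter than necessary: a logger configured at exactly FINER would publish the entry/exit records, but the guards skip them. A minimal sketch with a matching FINER guard (class and method names are hypothetical):

import java.util.logging.Level;
import java.util.logging.Logger;

public class TraceGuardDemo {
    private static final Logger LOG = Logger.getLogger(TraceGuardDemo.class.getName());

    static String work(String input) {
        // entering()/exiting() log at FINER, so guard with the same level.
        if (LOG.isLoggable(Level.FINER)) {
            LOG.entering(TraceGuardDemo.class.getName(), "work", input);
        }
        String result = input.trim();
        if (LOG.isLoggable(Level.FINER)) {
            LOG.exiting(TraceGuardDemo.class.getName(), "work", result);
        }
        return result;
    }
}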

From source file:com.archivas.clienttools.arcutils.impl.adapter.Hcp3AuthNamespaceAdapter.java

protected boolean isVersioningEnabled(String procPath) throws StorageAdapterException {

    InputStream iStream = null;
    try {
        SAXBuilder builder = new SAXBuilder(false);
        iStream = getInputStream(procPath, false);
        Document procXML = builder.build(iStream);
        LOG.log(Level.FINEST, "procXML=" + procXML);
        close(); // Done with that stream, close

        Element procElement = procXML.getRootElement();
        return isVersioningEnabled(procElement, getProfile().getNamespace());

    } catch (IOException e) {
        handleIOExceptionFromRequest(e, "accessing namespace info for", getHost()); // throws an exception
    } catch (JDOMException e) {
        String errMsg = "Exception parsing proc data for archive with profile: " + getProfile();
        LOG.log(Level.WARNING, errMsg, e);
        throw new StorageAdapterLiteralException(errMsg, e);
    } finally {
        RuntimeException e = null;
        try {
            if (iStream != null) {
                iStream.close();
            }
        } catch (IOException io) {
            LOG.log(Level.FINE, "IOException closing during getProc", io);
        } catch (RuntimeException ex) {
            e = ex;
        }

        close();

        if (e != null) {
            throw e;
        }
    }
    // handleIOExceptionFromRequest() should have thrown; if its implementation ever changes and it
    // doesn't, throw here.
    throw new RuntimeException("Error determining if versioning is enabled");
}