Example usage for java.util.logging Level FINEST

List of usage examples for java.util.logging Level FINEST

Introduction

On this page you can find example usage of java.util.logging Level FINEST.

Prototype

Level FINEST

To view the source code for java.util.logging Level FINEST, click the Source Link below.

Click Source Link

Document

FINEST indicates a highly detailed tracing message.

Usage

From source file:com.l2jfree.mmocore.network.MMOLogger.java

@Override
public void trace(Object message) {
    // Trace-level messages map to JUL FINEST; this overload carries no cause.
    final String text = String.valueOf(message);
    log(Level.FINEST, text, null);
}

From source file:org.apache.reef.runtime.azbatch.evaluator.EvaluatorShim.java

private void onStart() {
    LOG.log(Level.FINEST, "Entering EvaluatorShim.onStart().");

    LOG.log(Level.INFO, "Reporting back to the driver with Shim Status = {0}",
            EvaluatorShimProtocol.EvaluatorShimStatus.ONLINE);

    // Build the ONLINE status message first, then hand it to the status channel.
    final EvaluatorShimProtocol.EvaluatorShimStatusProto statusMessage =
            EvaluatorShimProtocol.EvaluatorShimStatusProto.newBuilder()
                    .setRemoteIdentifier(this.remoteManager.getMyIdentifier())
                    .setContainerId(this.containerId)
                    .setStatus(EvaluatorShimProtocol.EvaluatorShimStatus.ONLINE)
                    .build();
    this.evaluatorShimStatusChannel.onNext(statusMessage);

    LOG.log(Level.FINEST, "Exiting EvaluatorShim.onStart().");
}

From source file:org.openspaces.security.ldap.ActiveDirectorySpringSecurityManager.java

/**
 * Attempts to authenticate the passed {@link UserDetails} object, returning a fully populated
 * {@link Authentication} object (including granted authorities) if successful.
 * <p>/*from www  .  j  a  v a  2 s. co m*/
 * This call will utilioze the Group mapper factory to create a collection of XAP authorities
 * from a collection of "GrantedAuthority" (memberOf in Active Directory). 
 * 
 * @param userDetails The GigaSpaces user details request object
 * @return a fully authenticated object including authorities 
 * @throws AuthenticationException if authentication fails
 */
public Authentication authenticate(UserDetails userDetails) throws AuthenticationException {
    try {
        org.springframework.security.core.Authentication authenticate = authenticationManager
                .authenticate(createAuthenticationRequest(userDetails));
        if (!authenticate.isAuthenticated()) {
            throw new AuthenticationException(
                    "Authentication failed for user [" + userDetails.getUsername() + "]");
        }

        // Create a list to hold granted authorities fetched from Active Directory
        Collection<? extends GrantedAuthority> grantedAuthorities = authenticate.getAuthorities();
        ArrayList<Authority> authoritiesList = new ArrayList<Authority>();

        authoritiesList
                .addAll(GroupMapperAuthorityFactory.create(grantedAuthorities, groupMapper.getGroupMap()));

        if (authoritiesList.size() < 1) {
            throw new AuthenticationException("Authentication failed for user [" + userDetails.getUsername()
                    + "]; User does not belong to any authority");
        }

        User user = new User(userDetails.getUsername(), userDetails.getPassword(),
                authoritiesList.toArray(new Authority[authoritiesList.size()]));
        Authentication authentication = new Authentication(user);
        return authentication;

    } catch (Exception exception) {
        if (logger.isLoggable(Level.FINEST)) {
            logger.log(Level.FINEST, "Caught exception upon authentication: " + exception, exception);
        }
        throw new AuthenticationException(exception);
    }
}

From source file:com.wills.clientproxy.HessianLBProxy.java

/**
 * Handles the object invocation by dispatching the call to a load-balanced
 * Hessian server node and decoding the reply.
 *
 * @param proxy
 *            the proxy object to invoke
 * @param method
 *            the method to call
 * @param args
 *            the arguments to the proxy object
 * @return the decoded reply value from the remote service
 * @throws Throwable
 *             if no server node is available, the HTTP status is not 200, or
 *             the reply cannot be decoded
 */
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
    String mangleName;

    // Choose a target node using the configured load-balancing strategy.
    HessianClusterNode hcn = _cm.getAvailableNodeByStraitegy();
    if (hcn == null) {
        throw new Exception("no available server node found!");
    }
    // The original re-checked hcn == null here as well; that condition was dead.
    if (hcn.getNode() == null) {
        throw new Exception("no server available");
    }

    // Remember the per-thread service URL used for this call.
    threadLocal.set(new URL(hcn.getURL() + this._type.getSimpleName()));

    try {
        lock.readLock().lock();
        mangleName = _mangleMap.get(method);
    } finally {
        lock.readLock().unlock();
    }

    if (mangleName == null) {
        String methodName = method.getName();
        Class<?>[] params = method.getParameterTypes();
        // equals and hashCode are special cased
        if (methodName.equals("equals") && params.length == 1 && params[0].equals(Object.class)) {
            Object value = args[0];
            if (value == null || !Proxy.isProxyClass(value.getClass()))
                return Boolean.FALSE;

            Object proxyHandler = Proxy.getInvocationHandler(value);

            if (!(proxyHandler instanceof HessianLBProxy))
                return Boolean.FALSE;

            // NOTE(review): the original answered false even when the other
            // handler was a HessianLBProxy; behavior preserved — equals is
            // always false for two distinct proxies.
            return Boolean.FALSE;
        } else if (methodName.equals("hashCode") && params.length == 0)
            return Integer.valueOf(_cm.hashCode());
        else if (methodName.equals("getHessianType"))
            return proxy.getClass().getInterfaces()[0].getName();
        else if (methodName.equals("getHessianURL"))
            return threadLocal.get().toString();
        else if (methodName.equals("toString") && params.length == 0)
            return "HessianProxy[" + threadLocal.get() + "]";

        if (!_factory.isOverloadEnabled())
            mangleName = method.getName();
        else
            mangleName = mangleName(method);

        try {
            lock.writeLock().lock();
            _mangleMap.put(method, mangleName);
        } finally {
            lock.writeLock().unlock();
        }
    }
    InputStream is = null;
    HessianConnection conn = null;

    try {
        if (log.isLoggable(Level.FINER))
            log.finer("Hessian[" + threadLocal.get() + "] calling " + mangleName);
        conn = sendRequest(mangleName, args, threadLocal.get());

        if (conn.getStatusCode() != 200) {
            throw new HessianProtocolException("http code is " + conn.getStatusCode());
        }

        is = conn.getInputStream();

        // At FINEST, tee the reply stream through a protocol debug decoder.
        if (log.isLoggable(Level.FINEST)) {
            PrintWriter dbg = new PrintWriter(new LogWriter(log));
            HessianDebugInputStream dIs = new HessianDebugInputStream(is, dbg);

            dIs.startTop2();

            is = dIs;
        }

        AbstractHessianInput in;

        int code = is.read();

        if (code == 'H') {
            // Hessian 2 reply; the next two bytes are the protocol version
            // and must be consumed even though they are not otherwise used.
            int major = is.read();
            int minor = is.read();

            in = _factory.getHessian2Input(is);

            Object value = in.readReply(method.getReturnType());

            return value;
        } else if (code == 'r') {
            // Hessian 1 reply; consume the version bytes before the body.
            int major = is.read();
            int minor = is.read();

            in = _factory.getHessianInput(is);

            in.startReplyBody();

            Object value = in.readObject(method.getReturnType());

            if (value instanceof InputStream) {
                // Ownership of the connection/stream passes to the result;
                // null them out so the finally block does not close them.
                value = new ResultInputStream(conn, is, in, (InputStream) value);
                is = null;
                conn = null;
            } else
                in.completeReply();

            return value;
        } else
            throw new HessianProtocolException("'" + (char) code + "' is an unknown code");
    } catch (HessianProtocolException e) {
        throw new HessianRuntimeException(e);
    } finally {
        try {
            if (is != null)
                is.close();
        } catch (Exception e) {
            log.log(Level.FINE, e.toString(), e);
        }

        try {
            if (conn != null)
                conn.destroy();
        } catch (Exception e) {
            log.log(Level.FINE, e.toString(), e);
        }
    }
}

From source file:edu.usu.sdl.openstorefront.service.PersistenceService.java

/**
 * Loads an entity by its primary key, or returns null when no record matches.
 *
 * @param entity the entity class to query
 * @param id the primary key value (simple value or composite PK object)
 * @return the matching entity, or null when id is null or nothing matches
 */
public <T> T findById(Class<T> entity, Object id) {
    // A null id can never match a record; short-circuit before touching the db.
    if (id == null) {
        log.log(Level.FINEST, "Id is null so return null");
        return null;
    }

    OObjectDatabaseTx db = getConnection();
    T returnEntity = null;
    try {
        // Guard clause: the supplied id must match the entity's PK type.
        if (!checkPkObject(db, entity, id)) {
            throw new OpenStorefrontRuntimeException("Id passed in doesn't match the PK type of the entity",
                    "Make sure you are passing the right PK");
        }

        Map<String, Object> pkFields = findIdField(entity, id);
        if (pkFields.isEmpty()) {
            throw new OpenStorefrontRuntimeException("Unable to find PK field",
                    "Mark the Primary Key field (@PK) on the entity: " + entity.getName());
        }

        // Build " field = :fieldParam AND ..." for every PK field.
        StringBuilder whereClause = new StringBuilder();
        Map<String, Object> parameters = new HashMap<>();
        for (Map.Entry<String, Object> pkField : pkFields.entrySet()) {
            String paramName = pkField.getKey().replace(".", PARAM_NAME_SEPARATOR) + "Param";
            parameters.put(paramName, pkField.getValue());
            whereClause.append(" ").append(pkField.getKey()).append(" = :").append(paramName).append(" AND");
        }
        // Trim the trailing "AND" left by the loop above.
        String whereClauseString = whereClause.substring(0, whereClause.length() - 3);

        List<T> results = db.query(
                new OSQLSynchQuery<>(
                        "select * from " + entity.getSimpleName() + " where " + whereClauseString),
                parameters);

        if (!results.isEmpty()) {
            returnEntity = results.get(0);
        }
    } finally {
        closeConnection(db);
    }

    return returnEntity;
}

From source file:com.l2jfree.mmocore.network.MMOLogger.java

@Override
public void trace(Object message, Throwable throwable) {
    // Trace-level messages map to JUL FINEST, with the cause attached.
    final String text = String.valueOf(message);
    log(Level.FINEST, text, throwable);
}

From source file:Peer.java

@Override
public String lookup(String word, Level logLevel) throws Exception {
    // Looks up the meaning of a word in the DHT: answers from the local
    // dictionary when this peer owns the word's key, otherwise forwards the
    // lookup to the closest known successor from the finger table.
    lg.log(Level.FINEST, "lookup Entry");

    // Hash the word to a key in the identifier space
    Key key = hasher.getHash(word);
    lg.log(Level.FINER, " Hashed word " + word + " has key " + key);

    // Largest key of the identifier space: 2^bitSize - 1
    Key max = new Key(BigInteger.valueOf((int) Math.pow(2, hasher.getBitSize()))).pred();

    // This peer owns the key when it falls in (pred, nodeid], with a special
    // case for wrap-around of the circular identifier space.
    // NOTE(review): since && binds tighter than ||, the wrap-around branch
    // groups as (pred > nodeid && key in (pred, max]) || key <= nodeid --
    // confirm this grouping matches the intended interval test.
    if (
    // Normal ascending range
    pred == null || (key.compare(pred) > 0 && key.compare(nodeid) <= 0)
    // Wrap-around (modulo) case
            || (pred.compare(nodeid) > 0 && (key.compare(pred) > 0 && key.compare(max) <= 0)
                    || (key.compare(nodeid) <= 0))) {
        lg.log(logLevel, "(lookup)Peer " + nodeid + " should have word " + word + " with key " + key);

        // Look the word up in the local dictionary
        if (dict.get(word) != null) {
            lg.log(Level.FINEST, "lookup Exit");
            return dict.get(word);
        } else {
            lg.log(Level.FINEST, "lookup Exit");
            return "Meaning is not found";
        }
    }
    // ... else forward to the closest successor taken from the finger table.
    else {
        Key closestNode = ft.getClosestSuccessor(key);

        lg.log(logLevel, "(lookup)Peer " + nodeid + " should NOT have word " + word + " with key " + key
                + " ... calling insert on the best finger table match " + closestNode);
        PeerInterface peer = getPeer(closestNode);
        lg.log(Level.FINEST, "lookup Exit");
        return peer.lookup(word, logLevel);
    }
}

From source file:org.geosdi.wps.utility.GeoServerUtils.java

/**
 *
 * @param crismaWorkspace/*  w w w .  j a v a 2s.c  o  m*/
 * @param crismaDatastore
 * @param namespace
 * @param featureName
 * @param styleName
 * @return
 * @throws Exception
 */
public FeatureTypeInfo getOrPublishFeatureType(WorkspaceInfo crismaWorkspace, DataStoreInfo crismaDatastore,
        NamespaceInfo namespace, String featureName, String styleName) throws Exception {

    String newFeatureName = featureName + "_" + crismaDatastore.getName();

    FeatureTypeInfo featureTypeInfo = this.catalog.getFeatureTypeByDataStore(crismaDatastore, newFeatureName);

    LayerInfo layer = this.catalog.getLayerByName(newFeatureName);

    if (featureTypeInfo != null && layer == null) {
        this.catalog.remove(featureTypeInfo);
        featureTypeInfo = null;
    }

    logger.log(Level.INFO, "FeatureTypeInfo: " + featureTypeInfo);

    if (featureTypeInfo == null) {
        logger.log(Level.INFO, "Creating featureTypeInfo");
        featureTypeInfo = this.catalog.getFactory().createFeatureType();

        featureTypeInfo.setStore(crismaDatastore);
        featureTypeInfo.setNamespace(namespace);
        featureTypeInfo.setName(newFeatureName);
        featureTypeInfo.setNativeName(featureName);
        logger.log(Level.INFO, "BUG overriding existing feature type");
        //            this.catalog.detach(featureTypeInfo);
        logger.log(Level.INFO, "FeatureTypeInfo: " + featureTypeInfo);
        this.catalog.add(featureTypeInfo);
        logger.log(Level.INFO, "AFTER Creating featureTypeInfo");

        DataAccess gtda = crismaDatastore.getDataStore(null);
        logger.log(Level.INFO, "crismaDatastore.getDataStore(null): " + crismaDatastore.getDataStore(null));
        if (gtda instanceof DataStore) {
            String typeName = featureTypeInfo.getName();
            if (featureTypeInfo.getNativeName() != null) {
                typeName = featureTypeInfo.getNativeName();
            }
            boolean typeExists = false;
            DataStore gtds = (DataStore) gtda;
            for (String name : gtds.getTypeNames()) {
                logger.log(Level.FINEST, "@@@@@@@@@@@@@@@@@@ Type Names: " + name);
                if (name.equals(typeName)) {
                    typeExists = true;
                    break;
                }
            }

            //check to see if this is a virtual JDBC feature type
            MetadataMap mdm = featureTypeInfo.getMetadata();
            boolean virtual = mdm != null && mdm.containsKey(FeatureTypeInfo.JDBC_VIRTUAL_TABLE);

            if (!virtual && !typeExists) {
                SimpleFeatureType simpleFeatureType = buildFeatureType(featureTypeInfo);
                gtds.createSchema(simpleFeatureType);
                // the attributes created might not match up 1-1 with the actual spec due to
                // limitations of the data store, have it re-compute them
                featureTypeInfo.getAttributes().clear();
                List<String> typeNames = Arrays.asList(gtds.getTypeNames());
                // handle Oracle oddities
                // TODO: use the incoming store capabilites API to better handle the name transformation
                if (!typeNames.contains(typeName) && typeNames.contains(typeName.toUpperCase())) {
                    featureTypeInfo.setNativeName(featureTypeInfo.getName().toLowerCase());
                }
            }

            CatalogBuilder cb = new CatalogBuilder(catalog);
            cb.initFeatureType(featureTypeInfo);

            //attempt to fill in metadata from underlying feature source
            try {
                FeatureSource featureSource = gtda
                        .getFeatureSource(new NameImpl(featureTypeInfo.getNativeName()));
                if (featureSource != null) {
                    cb.setupMetadata(featureTypeInfo, featureSource);
                }
            } catch (Exception e) {
                logger.log(Level.WARNING, "Unable to fill in metadata from underlying feature source", e);
            }

            if (featureTypeInfo.getStore() == null) {
                //get from requests
                featureTypeInfo.setStore(crismaDatastore);
            }

            NamespaceInfo ns = featureTypeInfo.getNamespace();
            if (ns != null && !ns.getPrefix().equals(crismaWorkspace)) {
                //TODO: change this once the two can be different and we untie namespace
                // from workspace
                logger.warning("Namespace: " + ns.getPrefix() + " does not match workspace: " + crismaWorkspace
                        + ", overriding.");
                ns = null;
            }

            if (ns == null) {
                //infer from workspace
                ns = catalog.getNamespaceByPrefix(GeoServerUtils.CRISMA_WORKSPACE);
                featureTypeInfo.setNamespace(ns);
            }

            featureTypeInfo.setEnabled(true);
            catalog.validate(featureTypeInfo, true).throwIfInvalid();
            catalog.add(featureTypeInfo);

            LayerInfo layerInfo = new CatalogBuilder(catalog).buildLayer(featureTypeInfo);
            logger.log(Level.INFO, "Layer Info result: " + layerInfo.toString());
            logger.log(Level.INFO, "Resource Info result: " + layerInfo.getResource());

            layerInfo.setName(newFeatureName);
            layerInfo.setTitle(newFeatureName);
            //create a layer for the feature type
            if (styleName != null) {
                StyleInfo s = catalog.getStyleByName(styleName);
                layerInfo.setDefaultStyle(s);
            }

            catalog.add(layerInfo);
        }
    }
    logger.log(Level.INFO, "Published feature with name: " + featureTypeInfo.getName());
    return featureTypeInfo;
}

From source file:org.apache.reef.io.network.NetworkConnectionServiceTest.java

/**
 * Test NetworkService registering multiple connection factories with a
 * streaming codec.
 */
@Test
public void testMultipleConnectionFactoriesStreamingTest() throws Exception {
    final String methodName = name.getMethodName();
    LOG.log(Level.FINEST, methodName);
    runNetworkConnServiceWithMultipleConnFactories(new StreamingStringCodec(), new StreamingIntegerCodec());
}

From source file:com.pivotal.gemfire.tools.pulse.internal.log.PulseLogWriter.java

@Override
public boolean finestEnabled() {
    // Delegate the level check to the wrapped JUL logger.
    final boolean loggable = logger.isLoggable(Level.FINEST);
    return loggable;
}