Example usage for java.lang.Boolean.booleanValue()

List of usage examples for java.lang.Boolean.booleanValue()

Introduction

On this page you can find example usages of java.lang.Boolean.booleanValue().

Prototype

@HotSpotIntrinsicCandidate
public boolean booleanValue() 

Document

Returns the value of this Boolean object as a boolean primitive.
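
As a quick illustration of the pattern that recurs in the examples below, here is a minimal, self-contained sketch (not taken from any of the projects listed; the class and method names are made up for illustration). It shows the usual way booleanValue() is paired with a null check, since calling it on a null Boolean throws a NullPointerException:

public class BooleanValueExample {

    /** Unwraps a possibly-null Boolean, falling back to a default value. */
    static boolean toPrimitive(Boolean value, boolean defaultValue) {
        // Guard against null before unboxing: booleanValue() on null would throw.
        return value == null ? defaultValue : value.booleanValue();
    }

    public static void main(String[] args) {
        Boolean parsed = Boolean.valueOf("true"); // e.g. read from configuration
        Boolean missing = null;

        System.out.println(toPrimitive(parsed, false));  // prints: true
        System.out.println(toPrimitive(missing, false)); // prints: false
    }
}

On Java 5 and later, auto-unboxing often makes the explicit booleanValue() call optional, but the null guard is still required either way.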

Usage

From source file:com.redhat.rhn.manager.monitoring.ModifyFilterCommand.java

/**
 * Update the criteria related to pattern matching on filter output. If
 * <code>pattern</code> is <code>null</code> or blank, all match-related criteria
 * will be removed. If it is non-blank, all match-related criteria are
 * replaced by one criterion that matches on the given <code>pattern</code>
 * string.
 * @param pattern the regex pattern to match against. This must be a valid
 * Perl5 regex.
 * @param matchCase whether the regex match should be case-sensitive or not
 */
public void updateMatch(String pattern, Boolean matchCase) {
    String[] values = new String[] { pattern };
    if (StringUtils.isBlank(pattern)) {
        values = new String[0];
    }
    boolean withCase = matchCase == null ? false : matchCase.booleanValue();
    MatchType mt = withCase ? MatchType.REGEX_CASE : MatchType.REGEX;
    updateCriteria(mt, values);
}

From source file:org.codehaus.groovy.grails.plugins.searchable.compass.search.DefaultStringQuerySearchableCompassQueryBuilder.java

public CompassQuery buildQuery(GrailsApplication grailsApplication, CompassSession compassSession, Map options,
        Object query) {
    Assert.notNull(query, "query cannot be null");
    Assert.isInstanceOf(String.class, query,
            "query must be a String but is [" + query.getClass().getName() + "]");

    String analyzer = (String) getOption(ANALYZER_NAMES, options);
    String parser = (String) getOption(PARSER_NAMES, options);
    String defaultSearchProperty = (String) getOption(DEFAULT_PROPERTY_NAMES, options);
    Collection properties = (Collection) getOption(PROPERTIES_NAMES, options);
    Boolean useAndDefaultOperator = (Boolean) getOption(USE_AND_DEFAULT_OPERATOR_NAMES, options);
    Boolean escape = MapUtils.getBoolean(options, "escape", Boolean.FALSE);

    Assert.isTrue(!(properties != null && defaultSearchProperty != null),
            "The " + DefaultGroovyMethods.join(DEFAULT_PROPERTY_NAMES, "/") + " and "
                    + DefaultGroovyMethods.join(PROPERTIES_NAMES, "/") + " options cannot be combined");

    String queryString = (String) query;
    if (escape.booleanValue()) {
        queryString = CompassQueryParser.escape(queryString);
    }

    CompassQueryBuilder compassQueryBuilder = compassSession.queryBuilder();
    CompassQueryBuilder.ToCompassQuery stringBuilder;
    if (properties != null && !properties.isEmpty()) {
        stringBuilder = compassQueryBuilder.multiPropertyQueryString(queryString);
        for (Iterator iter = properties.iterator(); iter.hasNext();) {
            ((CompassQueryBuilder.CompassMultiPropertyQueryStringBuilder) stringBuilder)
                    .add((String) iter.next());
        }
    } else {
        stringBuilder = compassQueryBuilder.queryString(queryString);
    }

    if (analyzer != null) {
        InvokerHelper.invokeMethod(stringBuilder, "setAnalyzer", analyzer);
    }
    if (parser != null) {
        InvokerHelper.invokeMethod(stringBuilder, "setQueryParser", parser);
    }
    if (defaultSearchProperty != null) {
        InvokerHelper.invokeMethod(stringBuilder, "setDefaultSearchProperty", defaultSearchProperty);
    }
    if (useAndDefaultOperator != null && useAndDefaultOperator.booleanValue()) {
        InvokerHelper.invokeMethod(stringBuilder, "useAndDefaultOperator", null);
    }
    return stringBuilder.toQuery();
}

From source file:it.doqui.index.ecmengine.business.job.model.CustomModelActivationJob.java

private void processModel(final RepoModelDefinition model) throws Exception {
    logger.debug("[CustomModelActivationJob::processModel] BEGIN");
    try {
        if (logger.isDebugEnabled()) {
            logger.debug("[CustomModelActivationJob::processModel] processing model: " + model.getRepoName());
            logger.debug("[CustomModelActivationJob::processModel] current model: " + model.toString());
        }

        String modelFileName = model.getRepoName();
        StoreRef storeRef = repoModelsLocation.getStoreRef();

        // MB: Try to get the root node. In tenants that are still being set up it has not been created yet
        NodeRef rootNode = null;
        try {
            rootNode = nodeService.getRootNode(storeRef);
        } catch (Exception e) {
            logger.error("[CustomModelActivationJob::processModel] root node not found " + storeRef);
        }

        // MB: check that the root node exists
        if (rootNode != null) {
            List<NodeRef> nodeRefs = searchService.selectNodes(rootNode, repoModelsLocation.getPath()
                    + "//.[@cm:name='" + modelFileName + "' and subtypeOf('cm:dictionaryModel')]", null,
                    namespaceService, false);

            // Check how many models were found
            if (nodeRefs.size() == 0) {
                logger.error("[CustomModelActivationJob::processModel] Could not find custom model "
                        + modelFileName);
            } else if (nodeRefs.size() > 1) {
                // unexpected: should not find multiple nodes with same name
                logger.error("[CustomModelActivationJob::processModel] Found multiple custom models "
                        + modelFileName);
            } else {
                NodeRef modelNodeRef = nodeRefs.get(0);

                boolean isActive = false;
                Boolean value = (Boolean) nodeService.getProperty(modelNodeRef, ContentModel.PROP_MODEL_ACTIVE);
                if (value != null) {
                    isActive = value.booleanValue();
                }
                // Note: dictionaryModelType.onContentUpdate() generates a refresh event on
                //       DictionaryDAO in order to load models.
                if (model.getModel() == null && isActive) {
                    logger.debug("[CustomModelActivationJob::processModel] model " + modelFileName
                            + " is not active: activating...");
                    dictionaryModelType.onContentUpdate(modelNodeRef, true);
                    logger.debug(
                            "[CustomModelActivationJob::processModel] model " + modelFileName + " activated");
                } else if (model.getModel() != null && !isActive) {
                    logger.debug("[CustomModelActivationJob::processModel] model " + modelFileName
                            + " is active: deactivating...");
                    dictionaryModelType.onContentUpdate(modelNodeRef, true);
                    logger.debug(
                            "[CustomModelActivationJob::processModel] model " + modelFileName + " deactivated");
                }
            }
        }
    } catch (Exception e) {
        logger.error("[CustomModelActivationJob::processModel] ERROR", e);
    } finally {
        logger.debug("[CustomModelActivationJob::processModel] END");
    }
}

From source file:com.eviware.soapui.impl.wsdl.support.wsdl.WsdlImporter.java

public WsdlInterface[] importWsdl(WsdlProject project, String wsdlUrl) throws Exception {
    WsdlContext wsdlContext = new WsdlContext(wsdlUrl, SoapVersion.Soap11, null, null);

    wsdlContext.load();

    List<Interface> result = new ArrayList<Interface>();

    Definition definition = wsdlContext.getDefinition();
    Map bindingMap = definition.getBindings();
    if (bindingMap.isEmpty()) {
        Map serviceMap = definition.getServices();
        if (serviceMap.isEmpty())
            throw new RuntimeException("Missing bindings and services in [" + wsdlUrl + "]");

        Iterator i = serviceMap.values().iterator();
        while (i.hasNext()) {
            Service service = (Service) i.next();
            Map portMap = service.getPorts();
            Iterator i2 = portMap.values().iterator();
            while (i2.hasNext()) {
                Port port = (Port) i2.next();

                Binding binding = port.getBinding();

                WsdlInterface ifc = (WsdlInterface) project
                        .getInterfaceByName(binding.getPortType().getQName().getLocalPart());
                if (ifc != null) {
                    Boolean res = UISupport.confirmOrCancel(
                            "Interface [" + ifc.getName() + "] already exists in project, update instead?",
                            "Import WSDL");
                    if (res == null)
                        return new WsdlInterface[0];

                    if (res.booleanValue()) {
                        ifc.updateDefinition(wsdlUrl, false);
                    }

                    continue;
                }

                WsdlInterface iface = importBinding(project, wsdlContext, binding);
                if (iface != null) {
                    SOAPAddress address = (SOAPAddress) WsdlUtils
                            .getExtensiblityElement(port.getExtensibilityElements(), SOAPAddress.class);
                    if (address != null)
                        iface.addEndpoint(address.getLocationURI());

                    result.add(iface);
                    iface.setWsdlContext(wsdlContext);
                }
            }
        }
    } else {
        Iterator i = bindingMap.values().iterator();
        while (i.hasNext()) {
            Binding binding = (Binding) i.next();

            WsdlInterface ifc = (WsdlInterface) project
                    .getInterfaceByName(binding.getPortType().getQName().getLocalPart());
            if (ifc != null && result.indexOf(ifc) == -1) {
                Boolean res = UISupport.confirmOrCancel(
                        "Interface [" + ifc.getName() + "] already exists in project, update instead?",
                        "Import WSDL");
                if (res == null)
                    return new WsdlInterface[0];

                if (res.booleanValue()) {
                    ifc.updateDefinition(wsdlUrl, false);
                }

                continue;
            }

            WsdlInterface iface = importBinding(project, wsdlContext, binding);
            if (iface != null) {
                result.add(iface);
                iface.setWsdlContext(wsdlContext);
            }
        }
    }

    return result.toArray(new WsdlInterface[result.size()]);
}

From source file:er.extensions.eof.ERXModelGroup.java

public static boolean _isKeyEnumOverriden(EOAttribute att, int key) {
    boolean result = false;
    if (att.prototype() != null) {
        Map characteristics = (Map) NSKeyValueCoding.Utility.valueForKey(att, "overwrittenCharacteristics");
        for (Iterator iter = characteristics.entrySet().iterator(); iter.hasNext();) {
            Map.Entry element = (Map.Entry) iter.next();
            String charateristic = element.getKey().toString();
            Boolean value = ((Boolean) element.getValue());
            if (charateristic.equalsIgnoreCase(_keyForEnum(key).toString())) {
                return value.booleanValue();
            }
        }
    }
    return result;
}

From source file:com.sun.portal.rssportlet.SettingsHandler.java

/** Get a portlet preference as a boolean. */
private boolean getBooleanPreference(String key, boolean def) {
    // Use the supplied default instead of a hard-coded "false" when the preference is unset.
    Boolean b = Boolean.valueOf(getPortletPreferences().getValue(key, Boolean.toString(def)));
    return b.booleanValue();
}

From source file:com.t3.model.Token.java

/**
 * Get a boolean value from the map or return the default value
 *
 * @param map
 *            Get the value from this map
 * @param propName
 *            The name of the property being read.
 * @param defaultValue
 *            The value for the property if it is not set in the map.
 * @return The value for the passed property
 */
private static boolean getBoolean(Map<String, Object> map, String propName, boolean defaultValue) {
    Boolean bool = (Boolean) map.get(propName);
    if (bool == null)
        return defaultValue;
    return bool.booleanValue();
}

From source file:fr.mailjet.rest.impl.UserRESTServiceImpl.java

@Override
public String domainStatus(EnumReturnType parType, String parDomainName, Boolean parCheck)
        throws UniformInterfaceException, IllegalArgumentException {
    MultivaluedMap<String, String> locParameters = this.createHTTPProperties(parType);
    if (StringUtils.isEmpty(parDomainName))
        throw new IllegalArgumentException();
    locParameters.putSingle(_domainAdd, parDomainName);
    if (parCheck != null) {
        int locRealValue = parCheck.booleanValue() ? 1 : 0;
        locParameters.putSingle("check", Integer.valueOf(locRealValue).toString());
    }
    return this.createPOSTRequest("userDomainstatus", locParameters);
}

From source file:com.centeractive.ws.server.AbstractCooperationTest.java

private void testOperation(SoapBuilder builder, Binding binding, BindingOperation operation, String url,
        int testServiceId, Boolean postSoapAction) throws Exception {
    OperationWrapper wrapper = builder.getOperation(binding, operation);
    log.info("Testing operation: " + wrapper);
    String request = builder.buildSoapMessageFromInput(wrapper);
    String contextPath = TestUtils.formatContextPath(testServiceId, binding);
    String endpointUrl = formatEndpointAddress(url, contextPath);

    String response = null;
    if (postSoapAction.booleanValue()) {
        String soapAction = SoapBuilder.getSOAPActionUri(operation);
        response = postRequest(endpointUrl, request, soapAction);
    } else {
        response = postRequest(endpointUrl, request);
    }

    if (SoapBuilder.isRequestResponseOperation(operation)) {
        String expectedResponse = builder.buildSoapMessageFromOutput(builder.getOperation(binding, operation));
        boolean identical = XmlTestUtils.isIdenticalNormalizedWithoutValues(expectedResponse, response);
        assertTrue("Error during validation of service " + testServiceId, identical);
    }
}

From source file:gov.nih.nci.grididloader.BigIdCreator.java

/**
 * Create Big Id's for each entity and save them into the database.
 * Each entity is updated in parallel by several threads, but the entities
 * are processed in a serial fashion.
 */
public void createAndUpdate() throws Exception {

    if (hiFactory.getSystemType() == HandleInterfaceType.CLASSIC) {
        // Create site handle, if the database is empty.
        // This is necessary because otherwise 50 threads will try to create it
        // at once, resulting in duplicates and a subsequent avalanche of collisions
        final HandleRepositoryIDInterface idSvc = (HandleRepositoryIDInterface) hiFactory.getHandleInterface();
        // create dummy id (also creates site handle)
        ResourceIdInfo rid = new ResourceIdInfo(new URI("urn://ncicb"), "dummy");
        idSvc.createOrGetGlobalID(rid);
        // remove the id we created, the site handle will remain
        idSvc.removeGlobalID(rid);
    }

    Connection conn = null;
    FileWriter benchmarkFile = null;

    try {
        benchmarkFile = new FileWriter("timings.txt");
        conn = dataSource.getConnection();

        for (BigEntity entity : config.getEntities()) {

            final String className = entity.getClassName();
            if (!classFilter.isEmpty() && ((include && !classFilter.contains(className))
                    || (!include && classFilter.contains(className)))) {
                System.err.println("Filtered out " + className);
                continue;
            }

            long start = System.currentTimeMillis();

            final String table = entity.getTableName();
            final String id = entity.getPrimaryKey();

            Statement stmt = null;
            ResultSet rs = null;
            long numRows = 0;
            long minId = 0;
            long maxId = 0;

            try {
                // get number of rows and id space for the current entity
                stmt = conn.createStatement();
                rs = stmt.executeQuery(
                        "SELECT MIN(" + id + ") minId, MAX(" + id + ") maxId, COUNT(*) rowCount FROM " + table);
                rs.next();
                numRows = rs.getLong("rowCount");
                minId = rs.getLong("minId");
                maxId = rs.getLong("maxId");
            } catch (SQLException e) {
                System.err.println("Error processing " + table);
                e.printStackTrace();
                continue;
            } finally {
                try {
                    if (rs != null)
                        rs.close();
                    if (stmt != null)
                        stmt.close();
                } catch (SQLException e) {
                    e.printStackTrace();
                }
            }

            /* This is an overly complicated formula to figure out the best 
             * chunk size possible. 
             * 
             * First we determine the idealChunkSize for the amount of rows
             * we are dealing with, based on a linear step equation:
             *10000|   ______
             * 9500|   :
             *     |  /:
             *     | / :
             * 500 |/  :
             * ____|___:_____
             *     0   500,000
             *          
             * In other words, the minimum chunk is 500. As the number of rows 
             * increases, the chunk size grows up to 9500. But after 500000 
             * rows, the chunk size jumps to 10000 and stays constant so that 
             * we don't overload each thread. Therefore, the chunk size is 
             * always between 500 and 10000. 
             * 
             * Secondly, the identifier spread is calculated and multiplied by 
             * the idealChunkSize to get the final chunkSize. If the ids are 
             * equal to the row numbers, the spread is 1 and the chunk size is 
             * ok. If, however, the id space is gigantic, then the chunk size 
             * will be increased proportionally to the average distance between
             * ids (assuming the ids are uniformly distributed).
             *  
             * This actually works perfectly only if the ids ARE uniformly
             * distributed. In other corner cases, where the ids are clustered
             * together within a huge id space, the id space must be
             * partitioned recursively. 
             */
            final float idealChunkSize = (numRows > 500000) ? 10000 : .018f * numRows + 500;
            final float spread = (float) (maxId - minId + 1) / (float) numRows;
            final long chunkSize = Math.round(idealChunkSize * spread);

            System.out.println("Processing " + entity + " (" + entity.getTableName() + ") rows(" + numRows
                    + ") range(" + minId + "," + maxId + ") parallel(" + entity.isParallelLoadable() + ")");
            System.out.println("Parameters: spread(" + spread + ") chunkSize(ideal=" + idealChunkSize
                    + " actual=" + chunkSize + ")");

            final Map<BatchUpdate, Future<Boolean>> futures = new HashMap<BatchUpdate, Future<Boolean>>();
            final Queue<BatchUpdate> updates = new LinkedList<BatchUpdate>();

            // start each chunk as a task on the executor
            for (long i = minId; i <= maxId; i += chunkSize) {
                BatchUpdate update = new BatchUpdate(dataSource, hiFactory, entity, i, i + chunkSize - 1);
                updates.add(update);

                Future<Boolean> future = entity.isParallelLoadable() ? parallelExecutor.submit(update)
                        : serialExecutor.submit(update);

                futures.put(update, future);
            }

            // wait for all updates to finish
            while (!updates.isEmpty()) {
                final BatchUpdate update = updates.remove();
                final Future<Boolean> future = futures.remove(update);
                try {
                    // this get() blocks until the future is available
                    Boolean success = future.get();
                    if (success == null || !success.booleanValue()) {
                        System.err.println("FAILED: " + update);
                    } else {
                        int n = update.getNumUpdated();
                        if (n == 0) {
                            System.out.println("  done " + update + " (no rows found)");
                        } else {
                            int ut = (int) update.getAverageUpdateTime();
                            int ht = (int) update.getAverageHandleTime();
                            System.out.println("  done " + update + " rows(" + n + " rows) avg(handle=" + ht
                                    + "ms, update=" + ut + "ms)");
                        }
                    }
                } catch (ExecutionException e) {
                    System.err.println("Update failed for entity: " + entity);
                    e.printStackTrace();
                } catch (InterruptedException e) {
                    System.err.println("Update failed for entity: " + entity);
                    e.printStackTrace();
                }
            }

            float time = System.currentTimeMillis() - start;
            System.out.println("Done " + entity + " (" + (time / 1000) + " sec)\n");
            benchmarkFile.write(entity.getClassName() + "\t" + numRows + "\t" + time + "\n");
            benchmarkFile.flush();
        }

    } finally {
        try {
            if (conn != null)
                conn.close();
            if (benchmarkFile != null)
                benchmarkFile.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    // Done 
    parallelExecutor.shutdown();
    serialExecutor.shutdown();
}