Example usage for org.apache.commons.lang ArrayUtils isEmpty

List of usage examples for org.apache.commons.lang ArrayUtils isEmpty

Introduction

On this page you can find example usage of org.apache.commons.lang ArrayUtils.isEmpty.

Prototype

public static boolean isEmpty(boolean[] array) 

Source Link

Document

Checks if an array of primitive booleans is empty or null.

Usage

From source file:org.apache.ranger.common.RangerSearchUtil.java

/**
 * Builds a {@link SearchFilter} from the request parameters: copies every
 * well-known filter key, then any parameter whose name carries the resource
 * prefix, and finally extracts the common criteria (paging/sorting).
 *
 * @param request    incoming HTTP request; may be null
 * @param sortFields sort fields supported by the caller
 * @return populated filter, or null when {@code request} is null
 */
public SearchFilter getSearchFilter(HttpServletRequest request, List<SortField> sortFields) {
    if (request == null) {
        return null;
    }

    SearchFilter ret = new SearchFilter();

    if (MapUtils.isEmpty(request.getParameterMap())) {
        ret.setParams(new HashMap<String, String>());
    }

    // Copy each well-known filter parameter straight from the request.
    String[] filterKeys = { SearchFilter.SERVICE_TYPE, SearchFilter.SERVICE_TYPE_ID,
            SearchFilter.SERVICE_NAME, SearchFilter.SERVICE_NAME_PARTIAL, SearchFilter.SERVICE_ID,
            SearchFilter.POLICY_NAME, SearchFilter.POLICY_NAME_PARTIAL, SearchFilter.POLICY_ID,
            SearchFilter.IS_ENABLED, SearchFilter.IS_RECURSIVE, SearchFilter.USER, SearchFilter.GROUP,
            SearchFilter.POL_RESOURCE, SearchFilter.RESOURCE_SIGNATURE, SearchFilter.POLICY_TYPE,
            SearchFilter.PLUGIN_HOST_NAME, SearchFilter.PLUGIN_APP_TYPE, SearchFilter.PLUGIN_ENTITY_TYPE,
            SearchFilter.PLUGIN_IP_ADDRESS };

    for (String filterKey : filterKeys) {
        ret.setParam(filterKey, request.getParameter(filterKey));
    }

    // Parameters whose name starts with the resource prefix are copied as-is;
    // only the first value of a multi-valued parameter is kept.
    for (Map.Entry<String, String[]> entry : request.getParameterMap().entrySet()) {
        String paramName = entry.getKey();
        String[] paramValues = entry.getValue();

        if (!StringUtils.isEmpty(paramName) && !ArrayUtils.isEmpty(paramValues)
                && paramName.startsWith(SearchFilter.RESOURCE_PREFIX)) {
            ret.setParam(paramName, paramValues[0]);
        }
    }

    extractCommonCriteriasForFilter(request, ret, sortFields);

    return ret;
}

From source file:org.apache.ranger.plugin.resourcematcher.RangerPathResourceMatcher.java

/**
 * Checks whether {@code wildcardPath} matches the full path or any ancestor
 * prefix of {@code pathToCheck}, growing the candidate one path element at a
 * time.
 *
 * @param pathToCheck       path being tested; empty/null yields false
 * @param wildcardPath      wildcard pattern to match against
 * @param pathSeparatorChar character separating path elements
 * @param caseSensitivity   case handling used by the wildcard match
 * @return true if the pattern matches any prefix (or the whole) of the path
 */
static boolean isRecursiveWildCardMatch(String pathToCheck, String wildcardPath, char pathSeparatorChar,
        IOCase caseSensitivity) {

    boolean ret = false;

    if (!StringUtils.isEmpty(pathToCheck)) {
        String[] pathElements = StringUtils.split(pathToCheck, pathSeparatorChar);

        if (!ArrayUtils.isEmpty(pathElements)) {
            StringBuilder sb = new StringBuilder();

            if (pathToCheck.charAt(0) == pathSeparatorChar) {
                sb.append(pathSeparatorChar); // preserve the initial pathSeparatorChar
            }

            // Append one element at a time and stop at the first matching prefix.
            for (String p : pathElements) {
                sb.append(p);

                ret = FilenameUtils.wildcardMatch(sb.toString(), wildcardPath, caseSensitivity);

                if (ret) {
                    break;
                }

                sb.append(pathSeparatorChar);
            }
            // Note: removed a useless `sb = null;` dead store here — the local
            // goes out of scope immediately anyway.
        } else { // pathToCheck consists of only pathSeparatorChar characters
            ret = FilenameUtils.wildcardMatch(pathToCheck, wildcardPath, caseSensitivity);
        }
    }
    return ret;
}

From source file:org.apache.ranger.services.hdfs.client.HdfsClient.java

/**
 * Lists entries under {@code baseDir} in HDFS, skipping paths already present
 * in {@code pathList} and (when given) keeping only names matching the
 * {@code fileMatching} wildcard. An existing but empty directory is reported
 * as the directory itself.
 *
 * @param baseDir      directory to list
 * @param fileMatching optional wildcard filter on the entry name; may be null/blank
 * @param pathList     already-known paths to exclude; may be null
 * @return matching paths (prefixed with {@code baseDir})
 * @throws HadoopException wrapping connection, lookup, and argument errors
 */
private List<String> listFilesInternal(String baseDir, String fileMatching, final List<String> pathList)
        throws HadoopException {
    List<String> fileList = new ArrayList<String>();
    String errMsg = " You can still save the repository and start creating "
            + "policies, but you would not be able to use autocomplete for "
            + "resource names. Check ranger_admin.log for more info.";
    try {
        String dirPrefix = (baseDir.endsWith("/") ? baseDir : (baseDir + "/"));
        String filterRegEx = null;
        if (fileMatching != null && fileMatching.trim().length() > 0) {
            filterRegEx = fileMatching.trim();
        }

        UserGroupInformation.setConfiguration(conf);

        FileSystem fs = null;
        try {
            fs = FileSystem.get(conf);

            Path basePath = new Path(baseDir);
            FileStatus[] fileStats = fs.listStatus(basePath);

            if (fileStats != null) {
                // Debug log moved INSIDE the null check: the original logged
                // fileStats.length before checking for null, which could NPE.
                if (LOG.isDebugEnabled()) {
                    LOG.debug("<== HdfsClient fileStatus : " + fileStats.length + " PathList :" + pathList);
                }

                if (fs.exists(basePath) && ArrayUtils.isEmpty(fileStats)) {
                    // Directory exists but is empty: report the directory itself.
                    fileList.add(basePath.toString());
                } else {
                    for (FileStatus stat : fileStats) {
                        Path path = stat.getPath();
                        String pathComponent = path.getName();
                        String prefixedPath = dirPrefix + pathComponent;
                        if (pathList != null && pathList.contains(prefixedPath)) {
                            continue; // already known to the caller
                        }
                        // filterRegEx is only used as a "filter present?" flag;
                        // the actual match uses the raw fileMatching wildcard.
                        if (filterRegEx == null) {
                            fileList.add(prefixedPath);
                        } else if (FilenameUtils.wildcardMatch(pathComponent, fileMatching)) {
                            fileList.add(prefixedPath);
                        }
                    }
                }
            }
        } catch (UnknownHostException uhe) {
            String msgDesc = "listFilesInternal: Unable to connect using given config parameters"
                    + " of Hadoop environment [" + getSerivceName() + "].";
            HadoopException hdpException = new HadoopException(msgDesc, uhe);
            hdpException.generateResponseDataMap(false, getMessage(uhe), msgDesc + errMsg, null, null);
            if (LOG.isDebugEnabled()) {
                LOG.debug("<== HdfsClient listFilesInternal Error : " + uhe);
            }
            throw hdpException;
        } catch (FileNotFoundException fne) {
            String msgDesc = "listFilesInternal: Unable to locate files using given config parameters "
                    + "of Hadoop environment [" + getSerivceName() + "].";
            HadoopException hdpException = new HadoopException(msgDesc, fne);
            hdpException.generateResponseDataMap(false, getMessage(fne), msgDesc + errMsg, null, null);

            if (LOG.isDebugEnabled()) {
                LOG.debug("<== HdfsClient listFilesInternal Error : " + fne);
            }

            throw hdpException;
        }
    } catch (IOException ioe) {
        // Fixed message: the original read "directory " + baseDir + fileMatching + "]"
        // with no opening bracket or separator.
        String msgDesc = "listFilesInternal: Unable to get listing of files for directory [" + baseDir
                + ", " + fileMatching + "] from Hadoop environment [" + getSerivceName() + "].";
        HadoopException hdpException = new HadoopException(msgDesc, ioe);
        hdpException.generateResponseDataMap(false, getMessage(ioe), msgDesc + errMsg, null, null);
        if (LOG.isDebugEnabled()) {
            LOG.debug("<== HdfsClient listFilesInternal Error : " + ioe);
        }
        throw hdpException;

    } catch (IllegalArgumentException iae) {
        String msgDesc = "Unable to get listing of files for directory [" + baseDir
                + "] from Hadoop environment [" + getSerivceName() + "].";
        HadoopException hdpException = new HadoopException(msgDesc, iae);
        hdpException.generateResponseDataMap(false, getMessage(iae), msgDesc + errMsg, null, null);
        if (LOG.isDebugEnabled()) {
            LOG.debug("<== HdfsClient listFilesInternal Error : " + iae);
        }
        throw hdpException;
    }
    return fileList;
}

From source file:org.apache.ranger.services.sqoop.client.SqoopClient.java

/**
 * Tries each configured Sqoop URL in turn and returns the first HTTP-200
 * response; non-OK responses are closed and the next URL is attempted.
 *
 * @param sqoopUrl one or more Sqoop base URLs separated by ',' or ';'
 * @param sqoopApi API path appended to each base URL
 * @param userName user propagated via the pseudo-authentication parameter
 * @return the first OK response, the last non-null response, or null
 */
private static ClientResponse getClientResponse(String sqoopUrl, String sqoopApi, String userName) {
    String[] candidateUrls = sqoopUrl.trim().split("[,;]");
    if (ArrayUtils.isEmpty(candidateUrls)) {
        return null;
    }

    ClientResponse response = null;
    Client client = Client.create();

    for (String candidate : candidateUrls) {
        if (StringUtils.isBlank(candidate)) {
            continue;
        }

        String url = candidate.trim() + sqoopApi + "?" + PseudoAuthenticator.USER_NAME + "=" + userName;
        try {
            response = getClientResponse(url, client);

            if (response == null) {
                continue;
            }
            if (response.getStatus() == HttpStatus.SC_OK) {
                break; // first healthy endpoint wins
            }
            response.close(); // discard a non-OK response and try the next URL
        } catch (Throwable t) {
            String msgDesc = "Exception while getting sqoop response, sqoopUrl: " + url;
            LOG.error(msgDesc, t);
        }
    }
    client.destroy();

    return response;
}

From source file:org.apache.sling.models.impl.injectors.ResourcePathInjector.java

@Override
public Object getValue(@Nonnull Object adaptable, String name, @Nonnull Type declaredType,
        @Nonnull AnnotatedElement element, @Nonnull DisposalCallbackRegistry callbackRegistry) {
    String[] resourcePaths = null;
    Path pathAnnotation = element.getAnnotation(Path.class);
    ResourcePath resourcePathAnnotation = element.getAnnotation(ResourcePath.class);
    if (pathAnnotation != null) {
        resourcePaths = getPathsFromAnnotation(pathAnnotation);
    } else if (resourcePathAnnotation != null) {
        resourcePaths = getPathsFromAnnotation(resourcePathAnnotation);
    }//  w w  w  .ja v  a  2 s. com
    if (ArrayUtils.isEmpty(resourcePaths) && name != null) {
        // try the valuemap
        ValueMap map = getValueMap(adaptable);
        if (map != null) {
            resourcePaths = map.get(name, String[].class);
        }
    }
    if (ArrayUtils.isEmpty(resourcePaths)) {
        // could not find a path to inject
        return null;
    }

    ResourceResolver resolver = getResourceResolver(adaptable);
    if (resolver == null) {
        return null;
    }
    List<Resource> resources = getResources(resolver, resourcePaths, name);

    if (resources == null || resources.isEmpty()) {
        return null;
    }
    // unwrap if necessary
    if (isDeclaredTypeCollection(declaredType)) {
        return resources;
    } else if (resources.size() == 1) {
        return resources.get(0);
    } else {
        // multiple resources to inject, but field is not a list
        LOG.warn("Cannot inject multiple resources into field {} since it is not declared as a list", name);
        return null;
    }

}

From source file:org.apache.sling.resourceresolver.impl.legacy.LegacyResourceProviderFactoryAdapter.java

@Override
public @CheckForNull QueryLanguageProvider<LegacyResourceProviderAdapter> getQueryLanguageProvider() {
    if (ArrayUtils.isEmpty(languages)) {
        return super.getQueryLanguageProvider();
    } else {//from w  ww  .j ava  2s  . c o m
        return new JCRQueryProviderAdapter(languages);
    }
}

From source file:org.apache.solr.handler.component.FacetComponent.java

/**
 * Actually run the query/*from   w ww  .  ja v  a 2 s .  co m*/
 */
@Override
public void process(ResponseBuilder rb) throws IOException {

    if (rb.doFacets) {
        SolrParams params = rb.req.getParams();
        SimpleFacets f = new SimpleFacets(rb.req, rb.getResults().docSet, params, rb);

        RTimer timer = null;
        FacetDebugInfo fdebug = null;

        if (rb.isDebug()) {
            fdebug = new FacetDebugInfo();
            rb.req.getContext().put("FacetDebugInfo-nonJson", fdebug);
            timer = new RTimer();
        }

        NamedList<Object> counts = FacetComponent.getFacetCounts(f, fdebug);
        String[] pivots = params.getParams(FacetParams.FACET_PIVOT);
        if (!ArrayUtils.isEmpty(pivots)) {
            PivotFacetProcessor pivotProcessor = new PivotFacetProcessor(rb.req, rb.getResults().docSet, params,
                    rb);
            SimpleOrderedMap<List<NamedList<Object>>> v = pivotProcessor.process(pivots);
            if (v != null) {
                counts.add(PIVOT_KEY, v);
            }
        }

        if (fdebug != null) {
            long timeElapsed = (long) timer.getTime();
            fdebug.setElapse(timeElapsed);
        }

        rb.rsp.add("facet_counts", counts);
    }
}

From source file:org.artifactory.common.wicket.component.panel.shortcutkey.KeyEventHandler.java

/**
 * Registers the same listener under each of the given key codes.
 *
 * @param listener listener to notify for the keys
 * @param keyCodes key codes to bind; must be non-empty
 * @throws IllegalArgumentException when no key codes are supplied
 */
public void addKeyListener(KeyListener listener, Integer... keyCodes) {
    if (ArrayUtils.isEmpty(keyCodes)) {
        throw new IllegalArgumentException("got empty array of keyCodes");
    }

    for (Integer code : keyCodes) {
        listenerMap.put(code, listener);
    }
}

From source file:org.b3log.symphony.event.ArticleBaiduSender.java

/**
 * Sends the specified URLs to Baidu./*from  ww  w .j a  v a 2  s . co m*/
 *
 * @param urls the specified URLs
 * @throws Exception exception
 */
/**
 * Sends the specified URLs to Baidu's URL-submission endpoint.
 *
 * @param urls the specified URLs; the call is a no-op when null or empty
 * @throws Exception exception
 */
public static void sendToBaidu(final String... urls) throws Exception {
    if (ArrayUtils.isEmpty(urls)) {
        return;
    }

    final URLFetchService urlFetchService = URLFetchServiceFactory.getURLFetchService();

    final HTTPRequest request = new HTTPRequest();
    request.setURL(new URL("http://data.zz.baidu.com/urls?site=" + Latkes.getServerHost() + "&token=" + TOKEN));
    request.setRequestMethod(HTTPRequestMethod.POST);
    request.addHeader(new HTTPHeader("User-Agent", "curl/7.12.1"));
    request.addHeader(new HTTPHeader("Host", "data.zz.baidu.com"));
    request.addHeader(new HTTPHeader("Content-Type", "text/plain"));
    request.addHeader(new HTTPHeader("Connection", "close"));

    // One URL per line in the payload.
    final String urlsStr = StringUtils.join(urls, "\n");
    // Explicit charset: the no-arg getBytes() used the platform default, which
    // can mis-encode non-ASCII URLs depending on the JVM's locale settings.
    request.setPayload(urlsStr.getBytes("UTF-8"));

    urlFetchService.fetchAsync(request);

    LOGGER.debug("Sent [" + urlsStr + "] to Baidu");
}

From source file:org.betaconceptframework.astroboa.engine.definition.ContentDefinitionConfiguration.java

/**
 * Loads XSD definition files for the given repository, wrapping each in a
 * reloading {@link FileConfiguration} so runtime changes can be detected.
 *
 * @param associatedRepository       repository whose schema files are loaded
 * @param logWarningIfNoXSDFileFound whether to warn when no schema files exist
 * @throws CmsException wrapping any failure during the load
 */
private void loadConfigurationFiles(CmsRepository associatedRepository, boolean logWarningIfNoXSDFileFound) {

    //Load BetaConcept Definition files from repository home directory
    //which exists in RepositoryContextImpl
    try {

        File[] schemaFiles = retrieveXmlSchemaFiles(associatedRepository, logWarningIfNoXSDFileFound);

        //Create a file configuration for each xsd file
        //This is done in order to track any changes made at runtime to XSD
        //in order to reload definition
        definitionFileConfigurations.put(associatedRepository.getId(), new ArrayList<FileConfiguration>());

        // Branch on emptiness alone. The original condition was
        // `isEmpty(schemaFiles) && logWarningIfNoXSDFileFound`, so with a null
        // array and the flag false, control fell into the for-each below and
        // threw a NullPointerException.
        if (ArrayUtils.isEmpty(schemaFiles)) {
            if (logWarningIfNoXSDFileFound) {
                logger.warn("Found no definition schema files for repository " + associatedRepository);
            }
        } else {
            for (File defFile : schemaFiles) {
                try {
                    logger.debug("Loading definition file {} for repository {}", defFile.getAbsolutePath(),
                            associatedRepository.getId());

                    XMLConfiguration fileConfiguration = new XMLConfiguration(defFile);
                    fileConfiguration.setReloadingStrategy(new FileChangedReloadingStrategy());
                    definitionFileConfigurations.get(associatedRepository.getId()).add(fileConfiguration);
                } catch (Exception e) {
                    // A bad file is logged and replaced by an empty configuration
                    // so the remaining definitions still load.
                    logger.error("Error loading definition file " + defFile.getAbsolutePath()
                            + " for repository " + associatedRepository.getId()
                            + ". Most probably, it is not a valid XML file. All other definitions will be loaded",
                            e);

                    //Load an empty xml configuration
                    XMLConfiguration fileConfiguration = new XMLConfiguration();
                    definitionFileConfigurations.get(associatedRepository.getId()).add(fileConfiguration);
                }
            }
        }
    } catch (Throwable e) {
        throw new CmsException(e);
    }
}