Example usage for org.apache.commons.io FilenameUtils wildcardMatch

List of usage examples for org.apache.commons.io FilenameUtils wildcardMatch

Introduction

On this page you can find example usages of org.apache.commons.io FilenameUtils wildcardMatch.

Prototype

public static boolean wildcardMatch(String filename, String wildcardMatcher) 

Document

Checks a filename to see if it matches the specified wildcard matcher, always testing case-sensitive.
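
Before the project examples below, here is a minimal, self-contained sketch of the matcher's semantics (all filenames and patterns are made up for illustration): '?' matches exactly one character, '*' matches zero or more characters, and this two-argument overload always compares case-sensitively.

import org.apache.commons.io.FilenameUtils;

public class WildcardMatchDemo {
    public static void main(String[] args) {
        // '*' matches any number of characters, including none
        System.out.println(FilenameUtils.wildcardMatch("report-2020.csv", "report-*.csv")); // true

        // '?' matches exactly one character
        System.out.println(FilenameUtils.wildcardMatch("data1.txt", "data?.txt"));  // true
        System.out.println(FilenameUtils.wildcardMatch("data10.txt", "data?.txt")); // false

        // the two-argument overload is always case-sensitive
        System.out.println(FilenameUtils.wildcardMatch("README.md", "readme.*"));   // false
    }
}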

Usage

From source file:org.apache.ranger.services.hive.client.HiveClient.java

public List<String> getClmList(String columnNameMatching, List<String> dbList, List<String> tblList,
        List<String> colList) throws HadoopException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== HiveClient.getClmList() columnNameMatching: " + columnNameMatching + " dbList :" + dbList
                + " tblList: " + tblList + " colList: " + colList);
    }

    List<String> ret = new ArrayList<String>();
    String errMsg = " You can still save the repository and start creating "
            + "policies, but you would not be able to use autocomplete for "
            + "resource names. Check ranger_admin.log for more info.";
    if (con != null) {

        String columnNameMatchingRegEx = null;

        if (columnNameMatching != null && !columnNameMatching.isEmpty()) {
            columnNameMatchingRegEx = columnNameMatching;
        }

        Statement stat = null;
        ResultSet rs = null;

        String sql = null;

        if (dbList != null && !dbList.isEmpty() && tblList != null && !tblList.isEmpty()) {
            for (String db : dbList) {
                for (String tbl : tblList) {
                    try {
                        sql = "use " + db;

                        try {
                            stat = con.createStatement();
                            stat.execute(sql);
                        } finally {
                            close(stat);
                        }

                        sql = "describe  " + tbl;
                        stat = con.createStatement();
                        rs = stat.executeQuery(sql);
                        while (rs.next()) {
                            String columnName = rs.getString(1);
                            if (colList != null && colList.contains(columnName)) {
                                continue;
                            }
                            if (columnNameMatchingRegEx == null) {
                                ret.add(columnName);
                            } else if (FilenameUtils.wildcardMatch(columnName, columnNameMatchingRegEx)) {
                                ret.add(columnName);
                            }
                        }

                    } catch (SQLTimeoutException sqlt) {
                        String msgDesc = "Time Out, Unable to execute SQL [" + sql + "].";
                        HadoopException hdpException = new HadoopException(msgDesc, sqlt);
                        hdpException.generateResponseDataMap(false, getMessage(sqlt), msgDesc + errMsg, null,
                                null);
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("<== HiveClient.getClmList() Error : ", sqlt);
                        }
                        throw hdpException;
                    } catch (SQLException sqle) {
                        String msgDesc = "Unable to execute SQL [" + sql + "].";
                        HadoopException hdpException = new HadoopException(msgDesc, sqle);
                        hdpException.generateResponseDataMap(false, getMessage(sqle), msgDesc + errMsg, null,
                                null);
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("<== HiveClient.getClmList() Error : ", sqle);
                        }
                        throw hdpException;
                    } finally {
                        close(rs);
                        close(stat);
                    }
                }
            }
        }
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("<== HiveClient.getClmList() " + ret);
    }

    return ret;
}
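
The wildcard-specific part of the example above is the filter inside the result-set loop: column names already present in colList are skipped, and the remaining names are kept only when they match the user-supplied pattern. A minimal sketch of that filter, with hypothetical column names standing in for a "describe <table>" result set:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.io.FilenameUtils;

public class ColumnFilterSketch {
    public static void main(String[] args) {
        List<String> columns = Arrays.asList("cust_id", "cust_name", "order_id", "order_date");
        List<String> colList = Arrays.asList("cust_id");     // columns already selected
        String columnNameMatching = "cust*";                  // user-typed prefix pattern

        List<String> ret = new ArrayList<>();
        for (String columnName : columns) {
            if (colList.contains(columnName)) {
                continue;                                     // skip columns already chosen
            }
            if (columnNameMatching == null || columnNameMatching.isEmpty()
                    || FilenameUtils.wildcardMatch(columnName, columnNameMatching)) {
                ret.add(columnName);
            }
        }
        System.out.println(ret);                              // [cust_name]
    }
}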

From source file:org.apache.ranger.services.storm.client.StormClient.java

public List<String> getTopologyList(final String topologyNameMatching, final List<String> stormTopologyList) {

    LOG.debug("Getting Storm topology list for topologyNameMatching : " + topologyNameMatching);
    final String errMsg = errMessage;

    List<String> ret = new ArrayList<String>();

    PrivilegedAction<ArrayList<String>> topologyListGetter = new PrivilegedAction<ArrayList<String>>() {
        @Override
        public ArrayList<String> run() {

            ArrayList<String> lret = new ArrayList<String>();

            String url = stormUIUrl + TOPOLOGY_LIST_API_ENDPOINT;

            Client client = null;
            ClientResponse response = null;

            try {
                client = Client.create();

                WebResource webResource = client.resource(url);

                response = webResource.accept(EXPECTED_MIME_TYPE).get(ClientResponse.class);

                LOG.debug("getTopologyList():calling " + url);

                if (response != null) {
                    LOG.debug("getTopologyList():response.getStatus()= " + response.getStatus());
                    if (response.getStatus() == 200) {
                        String jsonString = response.getEntity(String.class);
                        Gson gson = new GsonBuilder().setPrettyPrinting().create();
                        TopologyListResponse topologyListResponse = gson.fromJson(jsonString,
                                TopologyListResponse.class);
                        if (topologyListResponse != null) {
                            if (topologyListResponse.getTopologyList() != null) {
                                for (Topology topology : topologyListResponse.getTopologyList()) {
                                    String topologyName = topology.getName();
                                    if (stormTopologyList != null && stormTopologyList.contains(topologyName)) {
                                        continue;
                                    }
                                    LOG.debug("getTopologyList():Found topology " + topologyName);
                                    LOG.debug("getTopologyList():topology Name=[" + topology.getName()
                                            + "], topologyNameMatching=[" + topologyNameMatching
                                            + "], existingStormTopologyList=[" + stormTopologyList + "]");
                                    if (topologyName != null) {
                                        if (topologyNameMatching == null || topologyNameMatching.isEmpty()
                                                || FilenameUtils.wildcardMatch(topology.getName(),
                                                        topologyNameMatching + "*")) {
                                            LOG.debug("getTopologyList():Adding topology " + topologyName);
                                            lret.add(topologyName);
                                        }
                                    }
                                }
                            }
                        }
                    } else {
                        LOG.info("getTopologyList():response.getStatus()= " + response.getStatus() + " for URL "
                                + url + ", so returning null list");
                        String jsonString = response.getEntity(String.class);
                        LOG.info(jsonString);
                        lret = null;
                    }
                } else {
                    String msgDesc = "Unable to get a valid response for " + "expected mime type : ["
                            + EXPECTED_MIME_TYPE + "] URL : " + url + " - got null response.";
                    LOG.error(msgDesc);
                    HadoopException hdpException = new HadoopException(msgDesc);
                    hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
                    throw hdpException;
                }
            } catch (HadoopException he) {
                throw he;
            } catch (Throwable t) {
                String msgDesc = "Exception while getting Storm TopologyList." + " URL : " + url;
                HadoopException hdpException = new HadoopException(msgDesc, t);
                LOG.error(msgDesc, t);

                hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + errMsg, null,
                        null);
                throw hdpException;

            } finally {
                if (response != null) {
                    response.close();
                }

                if (client != null) {
                    client.destroy();
                }

            }
            return lret;
        }
    };

    try {
        ret = executeUnderKerberos(this.userName, this.password, this.lookupPrincipal, this.lookupKeytab,
                this.nameRules, topologyListGetter);
    } catch (IOException e) {
        LOG.error("Unable to get Topology list from [" + stormUIUrl + "]", e);
    }

    return ret;
}

From source file:org.apache.ranger.services.tag.RangerServiceTag.java

@Override
public List<String> lookupResource(ResourceLookupContext context) throws Exception {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> RangerServiceTag.lookupResource(" + context + ")");
    }

    List<String> ret = new ArrayList<String>();

    if (context != null && StringUtils.equals(context.getResourceName(), TAG_RESOURCE_NAME)) {
        try {
            List<String> tags = tagStore != null ? tagStore.getTagTypes() : null;

            if (CollectionUtils.isNotEmpty(tags)) {
                List<String> valuesToExclude = MapUtils.isNotEmpty(context.getResources())
                        ? context.getResources().get(TAG_RESOURCE_NAME)
                        : null;

                if (CollectionUtils.isNotEmpty(valuesToExclude)) {
                    for (String valueToExclude : valuesToExclude) {
                        tags.remove(valueToExclude);
                    }
                }

                String valueToMatch = context.getUserInput();

                if (StringUtils.isNotEmpty(valueToMatch)) {
                    if (!valueToMatch.endsWith("*")) {
                        valueToMatch += "*";
                    }

                    for (String tag : tags) {
                        if (FilenameUtils.wildcardMatch(tag, valueToMatch)) {
                            ret.add(tag);
                        }
                    }
                }
            }
        } catch (Exception excp) {
            LOG.error("RangerServiceTag.lookupResource()", excp);
        }
    }

    if (LOG.isDebugEnabled()) {
        LOG.debug("<== RangerServiceTag.lookupResource(): tag count=" + ret.size());
    }

    return ret;
}
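
A detail worth noting in the example above: the user's partial input is turned into a prefix pattern by appending "*" before it is passed to wildcardMatch (the Storm example earlier does the same with topologyNameMatching). A small sketch of that idiom with made-up tag names:

import java.util.Arrays;
import java.util.List;
import org.apache.commons.io.FilenameUtils;

public class TagPrefixLookupSketch {
    public static void main(String[] args) {
        List<String> tags = Arrays.asList("PII", "PCI", "PUBLIC", "FINANCE");

        String valueToMatch = "P";               // partial user input
        if (!valueToMatch.endsWith("*")) {
            valueToMatch += "*";                 // "P" becomes the prefix pattern "P*"
        }

        for (String tag : tags) {
            if (FilenameUtils.wildcardMatch(tag, valueToMatch)) {
                System.out.println(tag);         // PII, PCI, PUBLIC
            }
        }
    }
}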

From source file:org.apache.ranger.storm.client.StormClient.java

public List<String> getTopologyList(final String topologyNameMatching) {

    LOG.debug("Getting Storm topology list for topologyNameMatching : " + topologyNameMatching);
    final String errMsg = " You can still save the repository and start creating "
            + "policies, but you would not be able to use autocomplete for "
            + "resource names. Check xa_portal.log for more info.";

    List<String> ret = new ArrayList<String>();

    PrivilegedAction<ArrayList<String>> topologyListGetter = new PrivilegedAction<ArrayList<String>>() {
        @Override
        public ArrayList<String> run() {

            ArrayList<String> lret = new ArrayList<String>();

            String url = stormUIUrl + TOPOLOGY_LIST_API_ENDPOINT;

            Client client = null;
            ClientResponse response = null;

            try {
                client = Client.create();

                WebResource webResource = client.resource(url);

                response = webResource.accept(EXPECTED_MIME_TYPE).get(ClientResponse.class);

                LOG.debug("getTopologyList():calling " + url);

                if (response != null) {
                    LOG.debug("getTopologyList():response.getStatus()= " + response.getStatus());
                    if (response.getStatus() == 200) {
                        String jsonString = response.getEntity(String.class);
                        Gson gson = new GsonBuilder().setPrettyPrinting().create();
                        TopologyListResponse topologyListResponse = gson.fromJson(jsonString,
                                TopologyListResponse.class);
                        if (topologyListResponse != null) {
                            if (topologyListResponse.getTopologyList() != null) {
                                for (Topology topology : topologyListResponse.getTopologyList()) {
                                    String toplogyName = topology.getName();
                                    LOG.debug("getTopologyList():Found topology " + toplogyName);
                                    LOG.debug("getTopologyList():topology Name=[" + topology.getName()
                                            + "], topologyNameMatching=[" + topologyNameMatching + "]");
                                    if (toplogyName != null) {
                                        if (topologyNameMatching == null || topologyNameMatching.isEmpty()
                                                || FilenameUtils.wildcardMatch(topology.getName(),
                                                        topologyNameMatching + "*")) {
                                            LOG.debug("getTopologyList():Adding topology " + toplogyName);
                                            lret.add(toplogyName);
                                        }
                                    }
                                }
                            }
                        }
                    } else {
                        LOG.info("getTopologyList():response.getStatus()= " + response.getStatus() + " for URL "
                                + url + ", so returning null list");
                        String jsonString = response.getEntity(String.class);
                        LOG.info(jsonString);
                        lret = null;
                    }
                } else {
                    String msgDesc = "Unable to get a valid response for " + "expected mime type : ["
                            + EXPECTED_MIME_TYPE + "] URL : " + url + " - got null response.";
                    LOG.error(msgDesc);
                    HadoopException hdpException = new HadoopException(msgDesc);
                    hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, null);
                    throw hdpException;
                }
            } catch (HadoopException he) {
                throw he;
            } catch (Throwable t) {
                String msgDesc = "Exception while getting Storm TopologyList." + " URL : " + url;
                HadoopException hdpException = new HadoopException(msgDesc, t);
                LOG.error(msgDesc, t);

                hdpException.generateResponseDataMap(false, BaseClient.getMessage(t), msgDesc + errMsg, null,
                        null);
                throw hdpException;

            } finally {
                if (response != null) {
                    response.close();
                }

                if (client != null) {
                    client.destroy();
                }

            }
            return lret;
        }
    };

    try {
        ret = executeUnderKerberos(this.userName, this.password, topologyListGetter);
    } catch (IOException e) {
        LOG.error("Unable to get Topology list from [" + stormUIUrl + "]", e);
    }

    return ret;
}

From source file:org.apache.solr.search.ReturnFields.java

public boolean wantsField(String name) {
    if (_wantsAllFields || okFieldNames.contains(name)) {
        return true;
    }
    for (String s : globs) {
        // TODO something better?
        if (FilenameUtils.wildcardMatch(name, s)) {
            okFieldNames.add(name); // Don't calculate it again
            return true;
        }
    }
    return false;
}

From source file:org.apache.solr.search.SolrReturnFields.java

@Override
public boolean wantsField(String name) {
    if (_wantsAllFields || okFieldNames.contains(name)) {
        return true;
    }
    for (String s : globs) {
        // TODO something better?
        if (FilenameUtils.wildcardMatch(name, s)) {
            okFieldNames.add(name); // Don't calculate it again
            return true;
        }
    }
    return false;
}
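
The two Solr snippets above use wildcardMatch for glob-style field selection, caching each field name once it has matched so the globs are not re-tested on the next call. A reduced sketch of that pattern, with hypothetical globs and field names:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.io.FilenameUtils;

public class FieldGlobSketch {
    private final Set<String> okFieldNames = new HashSet<>(Arrays.asList("id"));
    private final List<String> globs = Arrays.asList("price_*", "*_s");

    public boolean wantsField(String name) {
        if (okFieldNames.contains(name)) {
            return true;
        }
        for (String glob : globs) {
            if (FilenameUtils.wildcardMatch(name, glob)) {
                okFieldNames.add(name);   // remember the match; don't test the globs again
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        FieldGlobSketch fields = new FieldGlobSketch();
        System.out.println(fields.wantsField("price_usd")); // true  (matches price_*)
        System.out.println(fields.wantsField("title_s"));   // true  (matches *_s)
        System.out.println(fields.wantsField("score"));     // false
    }
}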

From source file:org.carlspring.maven.commons.io.filters.DirectoryFilter.java

private boolean containsMatches(String dirName, Set<String> includes) {
    boolean matches = false;
    for (String include : includes) {
        if (include.contains("*") || include.contains("?")) {
            if (FilenameUtils.wildcardMatch(dirName, include)) {
                matches = true;
                break;
            }
        } else {
            if (include.equals(dirName)) {
                matches = true;
                break;
            }
        }
    }

    return matches;
}
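
The DirectoryFilter example falls back to plain string equality when an include pattern contains no wildcard characters, so a literal directory name is never treated as a pattern. A compact sketch of that decision, with hypothetical include patterns:

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Set;
import org.apache.commons.io.FilenameUtils;

public class IncludeMatchSketch {
    static boolean containsMatches(String dirName, Set<String> includes) {
        for (String include : includes) {
            boolean isWildcard = include.contains("*") || include.contains("?");
            if (isWildcard ? FilenameUtils.wildcardMatch(dirName, include)
                           : include.equals(dirName)) {
                return true;
            }
        }
        return false;
    }

    public static void main(String[] args) {
        Set<String> includes = new LinkedHashSet<>(Arrays.asList("target", "build-*"));
        System.out.println(containsMatches("build-release", includes)); // true  (wildcard match)
        System.out.println(containsMatches("target", includes));        // true  (exact match)
        System.out.println(containsMatches("src", includes));           // false
    }
}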

From source file:org.eclipse.orion.internal.server.search.FileGrepper.java

/**
 * Check if the file path is acceptable.
 * @param filename The file path string.
 * @return True if the file passes all the filename patterns (with wildcards)
 */
private boolean acceptFilename(String filename) {
    if (options.getFilenamePattern() == null) {
        return true;
    }
    String filenamePattern = options.getFilenamePattern();
    boolean match = false;
    if (options.isFilenamePatternCaseSensitive()) {
        match = FilenameUtils.wildcardMatch(filename, filenamePattern);
    } else {
        match = FilenameUtils.wildcardMatch(filename.toLowerCase(), filenamePattern.toLowerCase());
    }
    return match;
}

From source file:org.eclipse.orion.internal.server.search.grep.FileGrepper.java

/**
 * Check if the file path is acceptable.
 * @param filename The file path string.
 * @return True if the file passes all the filename patterns (with wildcards)
 */
private boolean acceptFilename(String filename) {
    if (options.getFilenamePattern() == null) {
        return true;
    }
    String filenamePattern = options.getFilenamePattern();
    boolean match = false;
    if (options.isCaseSensitive()) {
        match = FilenameUtils.wildcardMatch(filename, filenamePattern);
    } else {
        match = FilenameUtils.wildcardMatch(filename.toLowerCase(), filenamePattern.toLowerCase());
    }
    return match;
}
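
Both FileGrepper variants above get case-insensitive behaviour by lower-casing the filename and the pattern before calling the two-argument wildcardMatch. A short sketch of that idiom; the second call assumes the IOCase-aware overload, wildcardMatch(String, String, IOCase), is available in your commons-io version and expresses the same intent directly:

import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOCase;

public class CaseInsensitiveMatchSketch {
    public static void main(String[] args) {
        String filename = "Report-FINAL.PDF";
        String filenamePattern = "*.pdf";

        // Idiom used in the FileGrepper examples: lower-case both sides first
        System.out.println(FilenameUtils.wildcardMatch(
                filename.toLowerCase(), filenamePattern.toLowerCase()));    // true

        // Equivalent using the overload that takes an IOCase (assumed available here)
        System.out.println(FilenameUtils.wildcardMatch(
                filename, filenamePattern, IOCase.INSENSITIVE));            // true
    }
}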

From source file:org.efs.openreports.delivery.FileSystemDeliveryMethod.java

public DeliveredReport[] getDeliveredReports(ReportUser user) throws DeliveryException {
    IOFileFilter extensionFilter = FileFilterUtils.suffixFileFilter("xml");

    File directory = new File(directoryProvider.getReportGenerationDirectory());

    ArrayList<DeliveredReport> deliveredReports = new ArrayList<DeliveredReport>();

    Iterator iterator = FileUtils.iterateFiles(directory, extensionFilter, null);
    while (iterator.hasNext()) {
        File file = (File) iterator.next();

        if (FilenameUtils.wildcardMatch(file.getName(), "*" + user.getName() + "*")) {
            XStream xStream = new XStream();
            xStream.alias("reportGenerationInfo", DeliveredReport.class);

            try {
                FileInputStream inputStream = new FileInputStream(file);

                DeliveredReport report = (DeliveredReport) xStream.fromXML(inputStream);

                deliveredReports.add(report);

                inputStream.close();
            } catch (IOException io) {
                log.warn(io.toString());
            }
        }
    }

    DeliveredReport[] reports = new DeliveredReport[deliveredReports.size()];
    deliveredReports.toArray(reports);

    return reports;
}
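
The delivery example above builds its pattern as "*" + user.getName() + "*", so any file whose name contains the user's name is treated as one of that user's delivered reports. A tiny sketch of that containment-style match with invented file names:

import java.util.Arrays;
import java.util.List;
import org.apache.commons.io.FilenameUtils;

public class DeliveredReportMatchSketch {
    public static void main(String[] args) {
        List<String> fileNames = Arrays.asList(
                "sales_report_alice_2021.xml",
                "inventory_report_bob_2021.xml");
        String userName = "alice";

        for (String fileName : fileNames) {
            if (FilenameUtils.wildcardMatch(fileName, "*" + userName + "*")) {
                System.out.println(fileName);    // sales_report_alice_2021.xml
            }
        }
    }
}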