Example usage for org.apache.commons.lang StringUtils substringBefore

Introduction

This page collects example usages of org.apache.commons.lang.StringUtils#substringBefore drawn from open-source projects.

Prototype

public static String substringBefore(String str, String separator) 

Document

Gets the substring before the first occurrence of a separator. The separator itself is not returned. A null string input returns null, an empty ("") input returns the empty string, and a null separator returns the input string; if the separator is not found, the input string is returned unchanged.
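
The null-safe edge cases are easy to get wrong; here is a minimal sketch of the documented contract:

import org.apache.commons.lang.StringUtils;

public class SubstringBeforeDemo {
    public static void main(String[] args) {
        // separator found: everything before its first occurrence
        System.out.println(StringUtils.substringBefore("a.b.c", "."));  // "a"
        // separator absent: the input is returned unchanged
        System.out.println(StringUtils.substringBefore("abc", "/"));   // "abc"
        // null-safe: null input yields null, null separator yields the input
        System.out.println(StringUtils.substringBefore(null, "."));    // null
        System.out.println(StringUtils.substringBefore("abc", null));  // "abc"
        // empty separator yields the empty string
        System.out.println(StringUtils.substringBefore("abc", ""));    // ""
    }
}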

Usage

From source file:org.apache.archiva.redback.common.ldap.role.DefaultLdapRoleMapper.java

public List<String> getGroupsMember(String group, DirContext context) throws MappingException {

    NamingEnumeration<SearchResult> namingEnumeration = null;
    try {

        SearchControls searchControls = new SearchControls();

        searchControls.setDerefLinkFlag(true);
        searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE);

        String filter = "objectClass=" + getLdapGroupClass();

        namingEnumeration = context.search("cn=" + group + "," + getGroupsDn(), filter, searchControls);

        List<String> allMembers = new ArrayList<String>();

        while (namingEnumeration.hasMore()) {
            SearchResult searchResult = namingEnumeration.next();

            Attribute uniqueMemberAttr = searchResult.getAttributes().get(getLdapGroupMember());

            if (uniqueMemberAttr != null) {
                NamingEnumeration<String> allMembersEnum = (NamingEnumeration<String>) uniqueMemberAttr
                        .getAll();
                while (allMembersEnum.hasMore()) {
                    String userName = allMembersEnum.next();
                    // value is like uid=<id>,...; keep only <id>
                    userName = StringUtils.substringAfter(userName, "=");
                    userName = StringUtils.substringBefore(userName, ",");
                    log.debug("found userName for group {}: '{}'", group, userName);

                    allMembers.add(userName);
                }
                close(allMembersEnum);
            }

        }

        return allMembers;
    } catch (LdapException e) {
        throw new MappingException(e.getMessage(), e);
    } catch (NamingException e) {
        throw new MappingException(e.getMessage(), e);
    }

    finally {
        close(namingEnumeration);
    }
}
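
The substringAfter/substringBefore pair above is a quick way to pull the value out of the first RDN of a DN string. A standalone sketch of the idiom, with a hypothetical DN; note it would mishandle a value containing an escaped comma:

import org.apache.commons.lang.StringUtils;

public class RdnValueSketch {
    public static void main(String[] args) {
        String dn = "uid=jdoe,ou=People,dc=example,dc=org"; // hypothetical entry
        String value = StringUtils.substringAfter(dn, "="); // "jdoe,ou=People,dc=example,dc=org"
        value = StringUtils.substringBefore(value, ",");    // "jdoe"
        System.out.println(value);
    }
}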

From source file:org.apache.archiva.redback.common.ldap.role.DefaultLdapRoleMapper.java

public List<String> getGroups(String username, DirContext context) throws MappingException {

    List<String> userGroups = new ArrayList<String>();

    NamingEnumeration<SearchResult> namingEnumeration = null;
    try {

        SearchControls searchControls = new SearchControls();

        searchControls.setDerefLinkFlag(true);
        searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE);
        String groupEntry = null;
        try {
            //try to look the user up
            User user = userManager.findUser(username);
            if (user instanceof LdapUser) {
                LdapUser ldapUser = LdapUser.class.cast(user);
                Attribute dnAttribute = ldapUser.getOriginalAttributes().get(getLdapDnAttribute());
                if (dnAttribute != null) {
                    groupEntry = String.class.cast(dnAttribute.get());
                }

            }
        } catch (UserNotFoundException e) {
            log.warn("Failed to look up user {}. Computing distinguished name manually", username, e);
        } catch (UserManagerException e) {
            log.warn("Failed to look up user {}. Computing distinguished name manually", username, e);
        }
        if (groupEntry == null) {
            //failed to look up the user's groupEntry directly
            StringBuilder builder = new StringBuilder();
            String posixGroup = "posixGroup";
            if (posixGroup.equals(getLdapGroupClass())) {
                builder.append(username);
            } else {
                builder.append(this.userIdAttribute).append("=").append(username).append(",")
                        .append(getBaseDn());
            }
            groupEntry = builder.toString();
        }

        String filter = new StringBuilder().append("(&").append("(objectClass=" + getLdapGroupClass() + ")")
                .append("(").append(getLdapGroupMember()).append("=").append(Rdn.escapeValue(groupEntry))
                .append(")").append(")").toString();

        log.debug("filter: {}", filter);

        namingEnumeration = context.search(getGroupsDn(), filter, searchControls);

        while (namingEnumeration.hasMore()) {
            SearchResult searchResult = namingEnumeration.next();

            List<String> allMembers = new ArrayList<String>();

            Attribute uniqueMemberAttr = searchResult.getAttributes().get(getLdapGroupMember());

            if (uniqueMemberAttr != null) {
                NamingEnumeration<String> allMembersEnum = (NamingEnumeration<String>) uniqueMemberAttr
                        .getAll();
                while (allMembersEnum.hasMore()) {

                    String userName = allMembersEnum.next();
                    // keep the original dn
                    allMembers.add(userName);
                    // value is like uid=<id>,...; keep only <id>
                    userName = StringUtils.substringAfter(userName, "=");
                    userName = StringUtils.substringBefore(userName, ",");
                    allMembers.add(userName);
                }
                close(allMembersEnum);
            }

            if (allMembers.contains(username) || allMembers.contains(groupEntry)) {
                String groupName = searchResult.getName();
                // name is like cn=<group>; keep only <group>
                groupName = StringUtils.substringAfter(groupName, "=");
                userGroups.add(groupName);
            }

        }

        return userGroups;
    } catch (LdapException e) {
        throw new MappingException(e.getMessage(), e);
    } catch (NamingException e) {
        throw new MappingException(e.getMessage(), e);
    } finally {
        close(namingEnumeration);
    }
}

From source file:org.apache.archiva.redback.users.ldap.ctl.DefaultLdapController.java

public Map<String, Collection<String>> findUsersWithRoles(DirContext dirContext)
        throws LdapControllerException {
    Map<String, Collection<String>> usersWithRoles = new HashMap<String, Collection<String>>();

    NamingEnumeration<SearchResult> namingEnumeration = null;
    try {

        SearchControls searchControls = new SearchControls();

        searchControls.setDerefLinkFlag(true);
        searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE);

        String filter = "objectClass=" + getLdapGroupClass();

        namingEnumeration = dirContext.search(getGroupsDn(), filter, searchControls);

        while (namingEnumeration.hasMore()) {
            SearchResult searchResult = namingEnumeration.next();

            String groupName = searchResult.getName();
            // name is like cn=<group>; keep only <group>
            groupName = StringUtils.substringAfter(groupName, "=");

            Attribute uniqueMemberAttr = searchResult.getAttributes().get("uniquemember");

            if (uniqueMemberAttr != null) {
                NamingEnumeration<String> allMembersEnum = (NamingEnumeration<String>) uniqueMemberAttr
                        .getAll();
                while (allMembersEnum.hasMore()) {
                    String userName = allMembersEnum.next();
                    // value is like uid=<id>,...; keep only <id>
                    userName = StringUtils.substringAfter(userName, "=");
                    userName = StringUtils.substringBefore(userName, ",");
                    Collection<String> roles = usersWithRoles.get(userName);
                    if (roles == null) {
                        roles = new HashSet<String>();
                    }

                    roles.add(groupName);

                    usersWithRoles.put(userName, roles);

                }
            }

            log.debug("found groupName: '{}' with users: {}", groupName);

        }

        return usersWithRoles;
    } catch (NamingException e) {
        throw new LdapControllerException(e.getMessage(), e);
    }

    finally {

        if (namingEnumeration != null) {
            try {
                namingEnumeration.close();
            } catch (NamingException e) {
                log.warn("failed to close search results", e);
            }
        }
    }
}

From source file:org.apache.archiva.rest.services.DefaultBrowseService.java

/**
 * @param path
 * @return org/apache -> org , org -> org
 */
private String getRootPath(String path) {
    if (StringUtils.contains(path, '/')) {
        return StringUtils.substringBefore(path, "/");
    }
    return path;
}
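
Because substringBefore returns its input unchanged when the separator is absent, the contains guard above is redundant; a minimal sketch showing both branches behave the same:

import org.apache.commons.lang.StringUtils;

public class RootPathSketch {
    // equivalent to getRootPath above: the guard can be dropped safely
    static String rootPath(String path) {
        return StringUtils.substringBefore(path, "/");
    }

    public static void main(String[] args) {
        System.out.println(rootPath("org/apache")); // org
        System.out.println(rootPath("org"));        // org (separator absent: input returned)
    }
}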

From source file:org.apache.archiva.web.docs.RestDocsServlet.java

@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {

    logger.debug("docs request to path: {}", req.getPathInfo());

    String path = StringUtils.removeStart(req.getPathInfo(), "/");
    InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(path);

    if (StringUtils.endsWith(path, ".xsd")) {
        StringEscapeUtils.escapeXml(resp.getWriter(), IOUtils.toString(is));
        //IOUtils.copy( is, resp.getOutputStream() );
        return;
    }

    String startPath = StringUtils.substringBefore(path, "/");

    // replace all links !!
    Document document = Jsoup.parse(is, "UTF-8", "");

    Element body = document.body().child(0);

    Elements links = body.select("a[href]");

    for (Element link : links) {
        link.attr("href", "#" + startPath + "/" + link.attr("href"));
    }

    Elements datalinks = body.select("[data-href]");

    for (Element link : datalinks) {
        link.attr("data-href", "#" + startPath + "/" + link.attr("data-href"));
    }

    Elements codes = body.select("code");

    for (Element code : codes) {
        code.attr("class", code.attr("class") + " nice-code");
    }

    // Enunciate-generated docs use h1/h2/h3, which render too large here, so demote
    // each level by two. Demote h3 first so the elements converted from h1/h2 below
    // are not picked up again and demoted twice.

    Elements headers = body.select("h3");

    for (Element header : headers) {
        header.tagName("h5");
    }

    headers = body.select("h2");

    for (Element header : headers) {
        header.tagName("h4");
    }

    headers = body.select("h1");

    for (Element header : headers) {
        header.tagName("h3");
    }

    Document res = new Document("");
    res.appendChild(body.select("div[id=main]").first());

    Elements scripts = body.select("script");
    for (Element script : scripts) {
        res.appendChild(script);
    }
    resp.getOutputStream().write(res.outerHtml().getBytes());

}

From source file:org.apache.hadoop.hbase.mapreduce.RowCounter.java

/**
 * Sets up the actual job.
 *
 * @param conf  The current configuration.
 * @param args  The command line parameters.
 * @return The newly created job.
 * @throws IOException When setting up the job fails.
 */
public static Job createSubmittableJob(Configuration conf, String[] args) throws IOException {
    String tableName = args[0];
    String startKey = null;
    String endKey = null;
    StringBuilder sb = new StringBuilder();

    final String rangeSwitch = "--range=";

    // First argument is table name, starting from second
    for (int i = 1; i < args.length; i++) {
        if (args[i].startsWith(rangeSwitch)) {
            String[] startEnd = args[i].substring(rangeSwitch.length()).split(",", 2);
            if (startEnd.length != 2 || startEnd[1].contains(",")) {
                printUsage("Please specify range in such format as \"--range=a,b\" "
                        + "or, with only one boundary, \"--range=,b\" or \"--range=a,\"");
                return null;
            }
            startKey = startEnd[0];
            endKey = startEnd[1];
        } else {
            // if no switch, assume column names
            sb.append(args[i]);
            sb.append(" ");
        }
    }

    Job job = new Job(conf, NAME + "_" + tableName);
    job.setJarByClass(RowCounter.class);
    Scan scan = new Scan();
    scan.setCacheBlocks(false);
    Set<byte[]> qualifiers = new TreeSet<byte[]>(Bytes.BYTES_COMPARATOR);
    if (startKey != null && !startKey.equals("")) {
        scan.setStartRow(Bytes.toBytes(startKey));
    }
    if (endKey != null && !endKey.equals("")) {
        scan.setStopRow(Bytes.toBytes(endKey));
    }
    if (sb.length() > 0) {
        for (String columnName : sb.toString().trim().split(" ")) {
            String family = StringUtils.substringBefore(columnName, ":");
            String qualifier = StringUtils.substringAfter(columnName, ":");

            if (StringUtils.isBlank(qualifier)) {
                scan.addFamily(Bytes.toBytes(family));
            } else {
                byte[] qualifierBytes = Bytes.toBytes(qualifier);
                qualifiers.add(qualifierBytes);
                scan.addColumn(Bytes.toBytes(family), qualifierBytes);
            }
        }
    }
    // specified column may or may not be part of first key value for the row.
    // Hence do not use FirstKeyOnlyFilter if scan has columns, instead use
    // FirstKeyValueMatchingQualifiersFilter.
    if (qualifiers.size() == 0) {
        scan.setFilter(new FirstKeyOnlyFilter());
    } else {
        scan.setFilter(new FirstKeyValueMatchingQualifiersFilter(qualifiers));
    }
    job.setOutputFormatClass(NullOutputFormat.class);
    TableMapReduceUtil.initTableMapperJob(tableName, scan, RowCounterMapper.class, ImmutableBytesWritable.class,
            Result.class, job);
    job.setNumReduceTasks(0);
    return job;
}
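
The family/qualifier split above leans on the different fallbacks of the two helpers when ":" is absent: substringBefore falls back to the whole string while substringAfter falls back to the empty string, so a bare family name parses cleanly. A sketch with hypothetical column names:

import org.apache.commons.lang.StringUtils;

public class ColumnSplitSketch {
    public static void main(String[] args) {
        for (String col : new String[] { "info", "info:name" }) {
            String family = StringUtils.substringBefore(col, ":");
            String qualifier = StringUtils.substringAfter(col, ":");
            // "info"      -> family "info", qualifier ""     (addFamily path)
            // "info:name" -> family "info", qualifier "name" (addColumn path)
            System.out.println(family + " / " + qualifier);
        }
    }
}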

From source file:org.apache.hive.beeline.util.QFileClient.java

public QFileClient setQFileName(String qFileName) {
    this.qFileName = qFileName;
    this.qFile = new File(qFileDirectory, qFileName);
    this.testname = StringUtils.substringBefore(qFileName, ".");
    expectedFile = new File(expectedDirectory, qFileName + ".out");
    outputFile = new File(outputDirectory, qFileName + ".out");
    return this;
}
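
Note that substringBefore cuts at the first dot, so a hypothetical q-file name containing an extra dot would lose part of the test name; substringBeforeLast keeps everything up to the extension:

import org.apache.commons.lang.StringUtils;

public class TestNameSketch {
    public static void main(String[] args) {
        System.out.println(StringUtils.substringBefore("groupby1.q", "."));    // groupby1
        // hypothetical dotted name: the first dot wins
        System.out.println(StringUtils.substringBefore("my.test.q", "."));     // my
        System.out.println(StringUtils.substringBeforeLast("my.test.q", ".")); // my.test
    }
}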

From source file:org.apache.hive.hcatalog.mapreduce.FileOutputCommitterContainer.java

private void registerPartitions(JobContext context) throws IOException {
    if (dynamicPartitioningUsed) {
        discoverPartitions(context);
    }
    OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(context.getConfiguration());
    Configuration conf = context.getConfiguration();
    Table table = new Table(jobInfo.getTableInfo().getTable());
    Path tblPath = new Path(table.getTTable().getSd().getLocation());
    FileSystem fs = tblPath.getFileSystem(conf);
    IMetaStoreClient client = null;
    HCatTableInfo tableInfo = jobInfo.getTableInfo();
    List<Partition> partitionsAdded = new ArrayList<Partition>();
    try {
        HiveConf hiveConf = HCatUtil.getHiveConf(conf);
        client = HCatUtil.getHiveMetastoreClient(hiveConf);
        if (table.getPartitionKeys().size() == 0) {
            // Move data from the temp directory to the actual table directory
            // No metastore operation required.
            Path src = new Path(jobInfo.getLocation());
            moveTaskOutputs(fs, src, src, tblPath, false, table.isImmutable());
            if (!src.equals(tblPath)) {
                fs.delete(src, true);
            }
            if (table.getParameters() != null
                    && table.getParameters().containsKey(StatsSetupConst.COLUMN_STATS_ACCURATE)) {
                table.getParameters().remove(StatsSetupConst.COLUMN_STATS_ACCURATE);
                client.alter_table(table.getDbName(), table.getTableName(), table.getTTable());
            }
            return;
        }

        StorerInfo storer = InternalUtil.extractStorerInfo(table.getTTable().getSd(), table.getParameters());

        FileStatus tblStat = fs.getFileStatus(tblPath);
        String grpName = tblStat.getGroup();
        FsPermission perms = tblStat.getPermission();

        List<Partition> partitionsToAdd = new ArrayList<Partition>();
        if (!dynamicPartitioningUsed) {
            partitionsToAdd.add(constructPartition(context, jobInfo, tblPath.toString(), null,
                    jobInfo.getPartitionValues(), jobInfo.getOutputSchema(), getStorerParameterMap(storer),
                    table, fs, grpName, perms));
        } else {
            for (Entry<String, Map<String, String>> entry : partitionsDiscoveredByPath.entrySet()) {
                partitionsToAdd.add(constructPartition(context, jobInfo,
                        getPartitionRootLocation(entry.getKey(), entry.getValue().size()), entry.getKey(),
                        entry.getValue(), jobInfo.getOutputSchema(), getStorerParameterMap(storer), table, fs,
                        grpName, perms));
            }
        }

        ArrayList<Map<String, String>> ptnInfos = new ArrayList<Map<String, String>>();
        for (Partition ptn : partitionsToAdd) {
            ptnInfos.add(InternalUtil.createPtnKeyValueMap(new Table(tableInfo.getTable()), ptn));
        }

        /**
         * Dynamic partitioning & Append incompatibility note:
         *
         * Currently, we do not support mixing dynamic partitioning and append in the
         * same job. One reason is that we need exhaustive testing of corner cases
         * for that, and a second reason is the behaviour of add_partitions. To support
         * dynamic partitioning with append, we'd have to have an add_partitions_if_not_exist
         * call, rather than an add_partitions call. Thus far, we've tried to keep the
         * implementation of append jobtype-agnostic, but here, in code, we assume that
         * a table is considered immutable if dynamic partitioning is enabled on the job.
         *
         * This does not mean that we can check before the job begins that this is going
         * to be a dynamic partition job on an immutable table and thus fail the job, since
         * it is quite possible to have a dynamic partitioning job run on an unpopulated
         * immutable table. It simply means that at the end of the job, as far as copying
         * in data is concerned, we will pretend that the table is immutable irrespective
         * of what table.isImmutable() tells us.
         */

        //Publish the new partition(s)
        if (dynamicPartitioningUsed && harProcessor.isEnabled() && (!partitionsToAdd.isEmpty())) {

            if (!customDynamicLocationUsed) {
                Path src = new Path(ptnRootLocation);
                // check here for each dir we're copying out, to see if it
                // already exists, error out if so.
                // Also, treat dyn-writes as writes to immutable tables.
                moveTaskOutputs(fs, src, src, tblPath, true, true); // dryRun = true, immutable = true
                moveTaskOutputs(fs, src, src, tblPath, false, true);
                if (!src.equals(tblPath)) {
                    fs.delete(src, true);
                }
            } else {
                moveCustomLocationTaskOutputs(fs, table, hiveConf);
            }
            try {
                updateTableSchema(client, table, jobInfo.getOutputSchema());
                LOG.info("HAR is being used. The table {} has new partitions {}.", table.getTableName(),
                        ptnInfos);
                client.add_partitions(partitionsToAdd);
                partitionsAdded = partitionsToAdd;
            } catch (Exception e) {
                // There was an error adding partitions : rollback fs copy and rethrow
                for (Partition p : partitionsToAdd) {
                    Path ptnPath = new Path(harProcessor.getParentFSPath(new Path(p.getSd().getLocation())));
                    if (fs.exists(ptnPath)) {
                        fs.delete(ptnPath, true);
                    }
                }
                throw e;
            }

        } else {

            // no harProcessor, regular operation
            updateTableSchema(client, table, jobInfo.getOutputSchema());
            LOG.info("HAR not is not being used. The table {} has new partitions {}.", table.getTableName(),
                    ptnInfos);
            if (partitionsToAdd.size() > 0) {
                if (!dynamicPartitioningUsed) {

                    // regular single-partition write into a partitioned table.
                    // Move data from the temp directory to the actual table directory
                    if (partitionsToAdd.size() > 1) {
                        throw new HCatException(ErrorType.ERROR_PUBLISHING_PARTITION,
                                "More than one partition to publish in non-dynamic partitioning job");
                    }
                    Partition p = partitionsToAdd.get(0);
                    Path src = new Path(jobInfo.getLocation());
                    Path dest = new Path(p.getSd().getLocation());
                    moveTaskOutputs(fs, src, src, dest, true, table.isImmutable());
                    moveTaskOutputs(fs, src, src, dest, false, table.isImmutable());
                    if (!src.equals(dest)) {
                        if (src.toString()
                                .matches(".*" + Path.SEPARATOR + SCRATCH_DIR_NAME + "\\d\\.?\\d+.*")) {
                            // src is scratch directory, need to trim the part key value pairs from path
                            String diff = StringUtils.difference(src.toString(), dest.toString());
                            fs.delete(new Path(StringUtils.substringBefore(src.toString(), diff)), true);
                        } else {
                            fs.delete(src, true);
                        }
                    }

                    // Now, we check if the partition already exists. If not, we go ahead.
                    // If so, we error out if immutable, and if mutable, check that the partition's IF
                    // matches our current job's IF (table's IF) to check for compatibility. If compatible, we
                    // ignore and do not add. If incompatible, we error out again.

                    boolean publishRequired = false;
                    try {
                        Partition existingP = client.getPartition(p.getDbName(), p.getTableName(),
                                p.getValues());
                        if (existingP != null) {
                            if (table.isImmutable()) {
                                throw new HCatException(ErrorType.ERROR_DUPLICATE_PARTITION,
                                        "Attempted duplicate partition publish on to immutable table");
                            } else {
                                if (!existingP.getSd().getInputFormat()
                                        .equals(table.getInputFormatClass().getName())) {
                                    throw new HCatException(ErrorType.ERROR_PUBLISHING_PARTITION,
                                            "Attempted partition append, where old partition format was "
                                                    + existingP.getSd().getInputFormat()
                                                    + " and table format was "
                                                    + table.getInputFormatClass().getName());
                                }
                            }
                        } else {
                            publishRequired = true;
                        }
                    } catch (NoSuchObjectException e) {
                        // All good, no such partition exists, move on.
                        publishRequired = true;
                    }
                    if (publishRequired) {
                        client.add_partitions(partitionsToAdd);
                        partitionsAdded = partitionsToAdd;
                    }

                } else {
                    // Dynamic partitioning usecase
                    if (!customDynamicLocationUsed) {
                        Path src = new Path(ptnRootLocation);
                        moveTaskOutputs(fs, src, src, tblPath, true, true); // dryRun = true, immutable = true
                        moveTaskOutputs(fs, src, src, tblPath, false, true);
                        if (!src.equals(tblPath)) {
                            fs.delete(src, true);
                        }
                    } else {
                        moveCustomLocationTaskOutputs(fs, table, hiveConf);
                    }
                    client.add_partitions(partitionsToAdd);
                    partitionsAdded = partitionsToAdd;
                }
            }

            // Set permissions appropriately for each of the partitions we just created
            // so as to have their permissions mimic the table permissions
            for (Partition p : partitionsAdded) {
                applyGroupAndPerms(fs, new Path(p.getSd().getLocation()), tblStat.getPermission(),
                        tblStat.getGroup(), true);
            }

        }
    } catch (Exception e) {
        if (partitionsAdded.size() > 0) {
            try {
                // baseCommitter.cleanupJob failed, try to clean up the
                // metastore
                for (Partition p : partitionsAdded) {
                    client.dropPartition(tableInfo.getDatabaseName(), tableInfo.getTableName(), p.getValues(),
                            true);
                }
            } catch (Exception te) {
                // Keep cause as the original exception
                throw new HCatException(ErrorType.ERROR_PUBLISHING_PARTITION, e);
            }
        }
        if (e instanceof HCatException) {
            throw (HCatException) e;
        } else {
            throw new HCatException(ErrorType.ERROR_PUBLISHING_PARTITION, e);
        }
    } finally {
        HCatUtil.closeHiveClientQuietly(client);
    }
}
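
The scratch-directory cleanup above pairs StringUtils.difference, which returns the remainder of its second argument from the first index where the two strings diverge, with substringBefore to recover the scratch prefix of the source path. A sketch with hypothetical paths:

import org.apache.commons.lang.StringUtils;

public class ScratchTrimSketch {
    public static void main(String[] args) {
        String src  = "/warehouse/demo/_SCRATCH0.123/ds=2014"; // hypothetical scratch location
        String dest = "/warehouse/demo/ds=2014";
        String diff = StringUtils.difference(src, dest);        // "ds=2014"
        // everything in src before the diverging suffix is the scratch root to delete
        System.out.println(StringUtils.substringBefore(src, diff)); // /warehouse/demo/_SCRATCH0.123/
    }
}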

From source file:org.apache.jackrabbit.core.query.lucene.FacetHandler.java

private void extractFacetInfo(NamedList<Object> info, SolrParams solrParams) {
    // Parse the queries
    _facetQuery = new LinkedHashMap<String, Long>();
    NamedList<Long> fq = (NamedList<Long>) info.get("facet_queries");
    if (fq != null) {
        for (Map.Entry<String, Long> entry : fq) {
            _facetQuery.put(entry.getKey(), entry.getValue());
        }
    }

    // Parse the facet info into fields
    // TODO?? The list could be <int> or <long>? If always <long> then we can switch to <Long>
    NamedList<NamedList<Number>> ff = (NamedList<NamedList<Number>>) info.get("facet_fields");
    Map<String, FieldType> fieldTypeMap = new HashMap<>();
    if (ff != null) {
        _facetFields = new ArrayList<FacetField>(ff.size());
        _limitingFacets = new ArrayList<FacetField>(ff.size());
        long minsize = totalSize;
        for (Map.Entry<String, NamedList<Number>> facet : ff) {
            String key = StringUtils.substringBeforeLast(facet.getKey(),
                    SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR);
            String fieldInIndex = StringUtils.substringAfterLast(facet.getKey(),
                    SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR);
            FacetField f = new FacetField(key);
            if (!fieldTypeMap.containsKey(key)) {
                try {
                    // Find a parameter key like f.<field_name>#<number>.facet.nodetype
                    Pattern facetNodetype = Pattern.compile("f\\." + key + "#[0-9]+\\.facet\\.nodetype");
                    String nodetypeName = null;
                    Iterator<String> parameterNamesIterator = solrParams.getParameterNamesIterator();
                    while (parameterNamesIterator.hasNext()) {
                        String next = parameterNamesIterator.next();
                        if (facetNodetype.matcher(next).matches()) {
                            nodetypeName = solrParams.get(next);
                            break;
                        }
                    }
                    ExtendedPropertyDefinition epd = NodeTypeRegistry.getInstance().getNodeType(nodetypeName)
                            .getPropertyDefinition(key);
                    fieldTypeMap.put(key, getType(epd));
                } catch (NoSuchNodeTypeException e) {
                    log.error(e.getMessage(), e);
                }
            }
            for (Map.Entry<String, Number> entry : facet.getValue()) {
                String facetValue = entry.getKey();
                String query = fieldTypeMap.get(key).toInternal(entry.getKey());
                Matcher matcher = valueWithQuery.matcher(facetValue);
                if (matcher.matches()) {
                    query = matcher.group(2);
                    facetValue = matcher.replaceFirst("$1");
                }
                f.add(facetValue, entry.getValue().longValue());
                f.getValues().get(f.getValueCount() - 1).setFilterQuery(
                        ClientUtils.escapeQueryChars(fieldInIndex) + ":" + ClientUtils.escapeQueryChars(query));
            }

            _facetFields.add(f);
            FacetField nl = f.getLimitingFields(minsize);
            if (nl.getValueCount() > 0) {
                _limitingFacets.add(nl);
            }
        }
    }

    // Parse date facets
    NamedList<NamedList<Object>> df = (NamedList<NamedList<Object>>) info.get("facet_dates");
    if (df != null) {
        // System.out.println(df);
        _facetDates = new ArrayList<FacetField>(df.size());
        for (Map.Entry<String, NamedList<Object>> facet : df) {
            // System.out.println("Key: " + facet.getKey() + " Value: " + facet.getValue());
            NamedList<Object> values = facet.getValue();
            String gap = (String) values.get("gap");
            Date end = (Date) values.get("end");
            FacetField f = new FacetField(StringUtils.substringBeforeLast(facet.getKey(),
                    SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR), gap, end);

            for (Map.Entry<String, Object> entry : values) {
                try {
                    String key = StringUtils.substringBeforeLast(entry.getKey(),
                            SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR);
                    String query = StringUtils.substringAfterLast(entry.getKey(),
                            SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR);
                    f.add(key, Long.parseLong(entry.getValue().toString()));
                    if (!StringUtils.isEmpty(query)) {
                        String rangePrefix = null;
                        if (query.contains(RANGEFROM_EXCLUSIVE_PREFIX)) {
                            rangePrefix = RANGEFROM_EXCLUSIVE_PREFIX;
                        } else if (query.contains(RANGEFROM_INCLUSIVE_PREFIX)) {
                            rangePrefix = RANGEFROM_INCLUSIVE_PREFIX;
                        }
                        if (!StringUtils.isEmpty(rangePrefix)) {
                            f.getValues().get(f.getValueCount() - 1)
                                    .setFilterQuery(ClientUtils
                                            .escapeQueryChars(StringUtils.substringBefore(query, rangePrefix))
                                            + rangePrefix + StringUtils.substringAfter(query, rangePrefix));
                        }
                    }
                } catch (NumberFormatException e) {
                    // Ignore for non-number responses which are already handled above
                }
            }

            _facetDates.add(f);
        }
    }

    // Parse range facets
    NamedList<NamedList<Object>> rf = (NamedList<NamedList<Object>>) info.get("facet_ranges");
    if (rf != null) {
        // System.out.println(df);
        _facetRanges = new ArrayList<RangeFacet>(rf.size());
        for (Map.Entry<String, NamedList<Object>> facet : rf) {
            NamedList<Object> values = facet.getValue();
            Object rawGap = values.get("gap");

            RangeFacet rangeFacet;
            if (rawGap instanceof Number) {
                Number gap = (Number) rawGap;
                Number start = (Number) values.get("start");
                Number end = (Number) values.get("end");

                Number before = (Number) values.get("before");
                Number after = (Number) values.get("after");

                rangeFacet = new RangeFacet.Numeric(StringUtils.substringBeforeLast(facet.getKey(),
                        SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR), start, end, gap, before, after);
            } else {
                String gap = (String) rawGap;
                Date start = (Date) values.get("start");
                Date end = (Date) values.get("end");

                Number before = (Number) values.get("before");
                Number after = (Number) values.get("after");

                rangeFacet = new RangeFacet.Date(StringUtils.substringBeforeLast(facet.getKey(),
                        SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR), start, end, gap, before, after);
            }

            NamedList<Integer> counts = (NamedList<Integer>) values.get("counts");
            for (Map.Entry<String, Integer> entry : counts) {
                try {
                    String key = StringUtils.substringBeforeLast(entry.getKey(),
                            SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR);
                    String query = StringUtils.substringAfterLast(entry.getKey(),
                            SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR);

                    rangeFacet.addCount(key, entry.getValue());

                    if (!StringUtils.isEmpty(query)) {
                        String rangePrefix = null;
                        if (query.contains(RANGEFROM_EXCLUSIVE_PREFIX)) {
                            rangePrefix = RANGEFROM_EXCLUSIVE_PREFIX;
                        } else if (query.contains(RANGEFROM_INCLUSIVE_PREFIX)) {
                            rangePrefix = RANGEFROM_INCLUSIVE_PREFIX;
                        }
                        if (!StringUtils.isEmpty(rangePrefix)) {
                            ((RangeFacet.Count) rangeFacet.getCounts().get(rangeFacet.getCounts().size() - 1))
                                    .setFilterQuery(ClientUtils
                                            .escapeQueryChars(StringUtils.substringBefore(query, rangePrefix))
                                            + rangePrefix + StringUtils.substringAfter(query, rangePrefix));
                        }
                    }
                } catch (NumberFormatException e) {
                    // Ignore for non-number responses which are already handled above
                }
            }

            _facetRanges.add(rangeFacet);
        }
    }
}
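
The filter-query construction above escapes only the text before the range marker and re-appends the marker plus remainder verbatim, so the range syntax survives escaping. A sketch with a hypothetical marker value standing in for RANGEFROM_INCLUSIVE_PREFIX:

import org.apache.commons.lang.StringUtils;
import org.apache.solr.client.solrj.util.ClientUtils;

public class RangeEscapeSketch {
    public static void main(String[] args) {
        String rangePrefix = ":[";            // hypothetical marker value
        String query = "price:[100 TO 200]";
        String fq = ClientUtils.escapeQueryChars(StringUtils.substringBefore(query, rangePrefix))
                + rangePrefix + StringUtils.substringAfter(query, rangePrefix);
        System.out.println(fq);  // "price" escaped, ":[100 TO 200]" untouched
    }
}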

From source file:org.apache.jackrabbit.core.query.lucene.JahiaIndexingConfigurationImpl.java

private void processExcludeConfiguration(Element element, NamespaceRegistry namespaceRegistry)
        throws RepositoryException {
    String nodeType = element.getAttribute("nodetype");
    if (StringUtils.isNotEmpty(nodeType)) {
        if (excludesTypesByPath.isEmpty()) {
            excludesTypesByPath = new HashSet<>();
        }
        String path = element.getAttribute("path");
        Boolean isRegexp = Boolean.valueOf(element.getAttribute("isRegexp"));
        NameFactory nf = NameFactoryImpl.getInstance();
        Name nodeTypeName = null;
        try {
            if (!nodeType.startsWith("{")) {
                nodeTypeName = nf.create(namespaceRegistry.getURI(StringUtils.substringBefore(nodeType, ":")),
                        StringUtils.substringAfter(nodeType, ":"));
            } else {
                nodeTypeName = nf.create(nodeType);
            }
        } catch (NamespaceException e) {
            logger.error("Cannot parse namespace for " + nodeType, e);
        } catch (IllegalArgumentException iae) {
            logger.error("Illegal node type name: " + nodeType, iae);
        }
        excludesTypesByPath.add(new ExcludedType(nodeTypeName, path, isRegexp));
    }
}
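
The colon split above is the usual substringBefore/substringAfter pairing for expanding a prefixed JCR name; a minimal sketch (the registry lookup is elided):

import org.apache.commons.lang.StringUtils;

public class PrefixedNameSketch {
    public static void main(String[] args) {
        String nodeType = "nt:unstructured";
        String prefix = StringUtils.substringBefore(nodeType, ":");   // "nt"
        String localName = StringUtils.substringAfter(nodeType, ":"); // "unstructured"
        // the prefix would then be resolved to a namespace URI via the NamespaceRegistry
        System.out.println(prefix + " -> " + localName);
    }
}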