Example usage for org.apache.commons.lang3.text StrTokenizer next

List of usage examples for org.apache.commons.lang3.text StrTokenizer next

Introduction

On this page you can find an example of usage for org.apache.commons.lang3.text StrTokenizer next.

Prototype

@Override
public String next() 

Source Link

Document

Gets the next token.

Usage

From source file:me.ineson.demo.service.utils.RestUtils.java

/**
 * @param where/*from   w w w . java2s . c o  m*/
 * @param root
 * @param query
 * @param builder
 * @param translations
 * @return
 */
public static Predicate parseWhereClause(String where, Root<?> root, CriteriaQuery<?> query,
        CriteriaBuilder builder, Map<String, String> translations) {

    List<Predicate> predicates = new ArrayList<Predicate>();
    for (String singleCriteria : new StrTokenizer(where, ",").getTokenList()) {
        if (StringUtils.isNotBlank(singleCriteria)) {
            int equalsIndex = singleCriteria.indexOf("=");
            if (equalsIndex > 0) {
                String fieldPath = singleCriteria.substring(0, equalsIndex);
                String value = singleCriteria.substring(equalsIndex + 1);

                if (translations != null && translations.containsKey(fieldPath)) {
                    String newFieldPath = translations.get(fieldPath);
                    log.debug("replacing field {} with {} ", fieldPath, newFieldPath);
                    fieldPath = newFieldPath;
                }

                StrTokenizer tokenizer = new StrTokenizer(fieldPath, ".");

                javax.persistence.criteria.Path<?> expression = null;
                while (tokenizer.hasNext()) {
                    String field = tokenizer.next();
                    if (tokenizer.hasNext()) {
                        if (expression == null) {
                            expression = root.join(field);
                        } else {
                            // expression = expression.join( field);
                            throw new IllegalArgumentException(
                                    "Paths to joins of greater than a depth of 1 are not implemented yet");
                        }
                    } else {
                        if (expression == null) {
                            log.info("expression0 {}", expression);
                            expression = root.get(field);
                            log.info("expression1 {}", expression);
                        } else {
                            expression = expression.get(field);
                        }
                    }
                }

                Object realValue = value;
                if ("bodyType".equals(fieldPath)) {
                    me.ineson.demo.service.SolarBodyType solarBodyType = me.ineson.demo.service.SolarBodyType
                            .valueOf(value);
                    switch (solarBodyType) {
                    case PLANET:
                        realValue = SolarBodyType.Planet;
                        break;

                    case SUN:
                        realValue = SolarBodyType.Sun;
                        break;

                    case DWARF_PLANET:
                        realValue = SolarBodyType.DwarfPlanet;
                        break;

                    default:
                        realValue = solarBodyType;
                    }
                    log.info("enum bodyType before {} after {}", value, realValue);
                }

                log.info("expression9 {}", expression);
                predicates.add(builder.equal(expression, realValue));
            }

        }
    }

    log.debug("predictes ");
    if (predicates.size() == 0) {
        return null;
    }
    if (predicates.size() == 1) {
        return predicates.get(0);
    }
    return builder.and(predicates.toArray(new Predicate[predicates.size()]));
}

From source file:org.apache.stratos.adc.mgt.cli.completer.CommandCompleter.java

/**
 * Completes the current command-line buffer.
 * <p>
 * When the buffer starts with a known action, completion is delegated to an
 * {@link ArgumentCompleter} seeded with the tokens typed so far plus the
 * action's remaining arguments. The help action delegates to the help
 * completer; anything else falls through to the default completer.
 *
 * @param buffer     the text typed so far; may be blank
 * @param cursor     the cursor position within the buffer
 * @param candidates receives the completion candidates
 * @return the completion anchor position, as defined by the delegate completer
 */
@Override
public int complete(String buffer, int cursor, List<CharSequence> candidates) {
    if (logger.isTraceEnabled()) {
        logger.trace("Buffer: {}, cursor: {}", buffer, cursor);
        logger.trace("Candidates {}", candidates);
    }
    if (StringUtils.isNotBlank(buffer)) {
        // The user has started typing a command; the first token is the action.
        StrTokenizer tokenizer = new StrTokenizer(buffer);
        String action = tokenizer.next();
        Collection<String> knownArguments = argumentMap.get(action);
        if (knownArguments != null) {
            if (logger.isTraceEnabled()) {
                logger.trace("Arguments found for {}, Tokens: {}", action, tokenizer.getTokenList());
                logger.trace("Arguments for {}: {}", action, knownArguments);
            }
            List<String> remainingArgs = new ArrayList<String>(knownArguments);
            List<Completer> completers = new ArrayList<Completer>();
            for (String token : tokenizer.getTokenList()) {
                boolean isKnownArgument = knownArguments.contains(token);
                // Skip option-like tokens that are not recognized arguments.
                if (!isKnownArgument && token.startsWith("-")) {
                    continue;
                }
                if (isKnownArgument) {
                    if (logger.isTraceEnabled()) {
                        logger.trace("Removing argument {}", token);
                    }
                    // Already typed, so it should no longer be offered.
                    remainingArgs.remove(token);
                }
                completers.add(new StringsCompleter(token));
            }
            completers.add(new StringsCompleter(remainingArgs));
            Completer delegate = new ArgumentCompleter(completers);
            return delegate.complete(buffer, cursor, candidates);
        } else if (CliConstants.HELP_ACTION.equals(action)) {
            // The help action takes the available commands as its arguments.
            return helpCommandCompleter.complete(buffer, cursor, candidates);
        }
    }
    if (logger.isTraceEnabled()) {
        logger.trace("Using Default Completer...");
    }
    return defaultCommandCompleter.complete(buffer, cursor, candidates);
}

From source file:org.grouplens.lenskit.data.dao.MapItemNameDAO.java

/**
 * Read an item list DAO from a file./* w ww .  j  a va  2  s  .  c o m*/
 * @param file A file of item IDs, one per line.
 * @return The item list DAO.
 * @throws java.io.IOException if there is an error reading the list of items.
 */
public static MapItemNameDAO fromCSVFile(File file) throws IOException {
    LineCursor cursor = LineCursor.openFile(file, CompressionMode.AUTO);
    try {
        ImmutableMap.Builder<Long, String> names = ImmutableMap.builder();
        StrTokenizer tok = StrTokenizer.getCSVInstance();
        for (String line : cursor) {
            tok.reset(line);
            long item = Long.parseLong(tok.next());
            String title = tok.nextToken();
            if (title != null) {
                names.put(item, title);
            }
        }
        return new MapItemNameDAO(names.build());
    } catch (NoSuchElementException ex) {
        throw new IOException(String.format("%s:%s: not enough columns", file, cursor.getLineNumber()), ex);
    } catch (NumberFormatException ex) {
        throw new IOException(String.format("%s:%s: id not an integer", file, cursor.getLineNumber()), ex);
    } finally {
        cursor.close();
    }
}

From source file:org.labkey.npod.DonorToolsSettings.java

/**
 * Replaces the included dataset ids with those parsed from a CSV string.
 *
 * @param includedDatasetIds comma-separated dataset ids; {@code null} yields an empty list
 */
public void setIncludedDatasetNames(String includedDatasetIds) {
    // Both branches previously created the list; initialize it once up front.
    _includedDatasetIds = new ArrayList<>();
    if (null != includedDatasetIds) {
        StrTokenizer tokenizer = StrTokenizer.getCSVInstance(includedDatasetIds);
        while (tokenizer.hasNext()) {
            _includedDatasetIds.add(Integer.valueOf(tokenizer.next()));
        }
    }
}

From source file:org.labkey.npod.DonorToolsSettings.java

/**
 * Replaces the priority sample type ids with those parsed from a CSV string.
 *
 * @param prioritySampleTypes comma-separated sample type ids; {@code null} yields an empty list
 */
public void setPrioritySampleTypes(String prioritySampleTypes) {
    // Both branches previously created the list; initialize it once up front.
    _prioritySampleTypeIds = new ArrayList<>();
    if (null != prioritySampleTypes) {
        StrTokenizer tokenizer = StrTokenizer.getCSVInstance(prioritySampleTypes);
        while (tokenizer.hasNext()) {
            _prioritySampleTypeIds.add(Integer.valueOf(tokenizer.next()));
        }
    }
}

From source file:org.lenskit.data.dao.file.DelimitedColumnEntityFormat.java

/**
 * Creates a line parser for this delimited-column format.
 * <p>
 * With a header line and labeled columns configured, the header is tokenized
 * and each label is mapped through {@code labeledColumns} to build the column
 * list; otherwise the preconfigured column list is used with a fresh tokenizer.
 *
 * @param header the header lines read from the input; its size must equal
 *               {@link #getHeaderLines()}
 * @return a parser for subsequent data lines
 */
@Override
public LineEntityParser makeParser(List<String> header) {
    assert header.size() == getHeaderLines();

    // Without a labeled header, fall back to the preconfigured column order.
    if (!usesHeader() || labeledColumns == null) {
        return new OrderedParser(columns, new StrTokenizer("", delimiter));
    }

    assert header.size() == 1;
    List<TypedName<?>> headerColumns = new ArrayList<>();
    StrTokenizer tokenizer = new StrTokenizer(header.get(0), delimiter);
    while (tokenizer.hasNext()) {
        // Note: unknown labels map to null entries, matching prior behavior.
        headerColumns.add(labeledColumns.get(tokenizer.next()));
    }
    return new OrderedParser(headerColumns, tokenizer);
}

From source file:org.lenskit.data.dao.MapItemNameDAO.java

/**
 * Read an item list DAO from a file./*from  www .  ja  v a 2  s.c om*/
 * @param file A file of item IDs, one per line.
 * @param skipLines The number of initial header to skip
 * @return The item list DAO.
 * @throws java.io.IOException if there is an error reading the list of items.
 */
public static MapItemNameDAO fromCSVFile(File file, int skipLines) throws IOException {
    Preconditions.checkArgument(skipLines >= 0, "cannot skip negative lines");
    LineStream stream = LineStream.openFile(file, CompressionMode.AUTO);
    try {
        ObjectStreams.consume(skipLines, stream);
        ImmutableMap.Builder<Long, String> names = ImmutableMap.builder();
        StrTokenizer tok = StrTokenizer.getCSVInstance();
        for (String line : stream) {
            tok.reset(line);
            long item = Long.parseLong(tok.next());
            String title = tok.nextToken();
            if (title != null) {
                names.put(item, title);
            }
        }
        return new MapItemNameDAO(names.build());
    } catch (NoSuchElementException ex) {
        throw new IOException(String.format("%s:%s: not enough columns", file, stream.getLineNumber()), ex);
    } catch (NumberFormatException ex) {
        throw new IOException(String.format("%s:%s: id not an integer", file, stream.getLineNumber()), ex);
    } finally {
        stream.close();
    }
}