Example usage for org.joda.time DateTimeZone forID

List of usage examples for org.joda.time DateTimeZone forID

Introduction

In this page you can find the example usage for org.joda.time DateTimeZone forID.

Prototype

@FromString
public static DateTimeZone forID(String id) 

Source Link

Document

Gets a time zone instance for the specified time zone id.

Usage

From source file:org.efaps.util.DateTimeUtil.java

License:Apache License

/**
 * The value that can be set is a Date, a DateTime or a String
 * yyyy-MM-dd'T'HH:mm:ss.SSSZZ. It will be normalized to ISO Calender with
 * TimeZone from SystemAttribute Admin_Common_DataBaseTimeZone. In case
 * that the SystemAttribute is missing UTC will be used.
 *
 *
 * @param _value    value from user interface to translate
 * @return translated date time/*from w ww .  j  a v  a 2s . com*/
 * @throws EFapsException on error
 */
public static DateTime translateFromUI(final Object _value) throws EFapsException {
    final DateTime ret;
    // reads the Value from "Admin_Common_DataBaseTimeZone"
    final String timezoneID = EFapsSystemConfiguration.get().getAttributeValue(KernelSettings.DBTIMEZONE);
    final ISOChronology chron;
    if (timezoneID != null) {
        final DateTimeZone timezone = DateTimeZone.forID(timezoneID);
        chron = ISOChronology.getInstance(timezone);
    } else {
        chron = ISOChronology.getInstanceUTC();
    }
    if (_value instanceof Date) {
        ret = new DateTime(_value).withChronology(chron);
    } else if (_value instanceof DateTime) {
        ret = ((DateTime) _value).withChronology(chron);
    } else if (_value instanceof String) {
        ret = ISODateTimeFormat.dateTime().parseDateTime((String) _value).withChronology(chron);
    } else {
        ret = null;
    }
    return ret;
}

From source file:org.elasticsearch.index.query.QueryStringQueryParser.java

License:Apache License

/**
 * Parses a {@code query_string} query from the request content into a Lucene
 * {@link Query}. Supports a "fields" array with optional per-field boosts in
 * "field^boost" form (wildcard field patterns are expanded against the index
 * mappings) and a large set of optional parser settings; a [query] value is
 * mandatory.
 *
 * @param parseContext the current query parse context
 * @return the parsed query, or {@code null} if the underlying parser produced none
 * @throws IOException           on content read failure
 * @throws QueryParsingException on unknown parameters or an unparsable query string
 */
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
    XContentParser parser = parseContext.parser();

    String queryName = null;
    QueryParserSettings qpSettings = new QueryParserSettings();
    qpSettings.defaultField(parseContext.defaultField());
    qpSettings.lenient(parseContext.queryStringLenient());
    qpSettings.analyzeWildcard(defaultAnalyzeWildcard);
    qpSettings.allowLeadingWildcard(defaultAllowLeadingWildcard);
    qpSettings.locale(Locale.ROOT);

    String currentFieldName = null;
    XContentParser.Token token;
    // Walk the object token by token, dispatching on the current field name.
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (token == XContentParser.Token.START_ARRAY) {
            if ("fields".equals(currentFieldName)) {
                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                    String fField = null;
                    float fBoost = -1;
                    // Scan for '^' to split an entry like "title^2" into name and boost.
                    char[] text = parser.textCharacters();
                    int end = parser.textOffset() + parser.textLength();
                    for (int i = parser.textOffset(); i < end; i++) {
                        if (text[i] == '^') {
                            int relativeLocation = i - parser.textOffset();
                            fField = new String(text, parser.textOffset(), relativeLocation);
                            fBoost = Float.parseFloat(
                                    new String(text, i + 1, parser.textLength() - relativeLocation - 1));
                            break;
                        }
                    }
                    if (fField == null) {
                        fField = parser.text();
                    }
                    if (qpSettings.fields() == null) {
                        qpSettings.fields(new ArrayList<String>());
                    }

                    // Wildcard patterns expand to every matching index field name.
                    if (Regex.isSimpleMatchPattern(fField)) {
                        for (String field : parseContext.mapperService().simpleMatchToIndexNames(fField)) {
                            qpSettings.fields().add(field);
                            if (fBoost != -1) {
                                if (qpSettings.boosts() == null) {
                                    qpSettings.boosts(new ObjectFloatHashMap<String>());
                                }
                                qpSettings.boosts().put(field, fBoost);
                            }
                        }
                    } else {
                        qpSettings.fields().add(fField);
                        if (fBoost != -1) {
                            if (qpSettings.boosts() == null) {
                                qpSettings.boosts(new ObjectFloatHashMap<String>());
                            }
                            qpSettings.boosts().put(fField, fBoost);
                        }
                    }
                }
            } else {
                throw new QueryParsingException(parseContext,
                        "[query_string] query does not support [" + currentFieldName + "]");
            }
        } else if (token.isValue()) {
            // Scalar settings; both snake_case and camelCase names are accepted.
            if ("query".equals(currentFieldName)) {
                qpSettings.queryString(parser.text());
            } else if ("default_field".equals(currentFieldName) || "defaultField".equals(currentFieldName)) {
                qpSettings.defaultField(parser.text());
            } else if ("default_operator".equals(currentFieldName)
                    || "defaultOperator".equals(currentFieldName)) {
                String op = parser.text();
                if ("or".equalsIgnoreCase(op)) {
                    qpSettings.defaultOperator(org.apache.lucene.queryparser.classic.QueryParser.Operator.OR);
                } else if ("and".equalsIgnoreCase(op)) {
                    qpSettings.defaultOperator(org.apache.lucene.queryparser.classic.QueryParser.Operator.AND);
                } else {
                    throw new QueryParsingException(parseContext,
                            "Query default operator [" + op + "] is not allowed");
                }
            } else if ("analyzer".equals(currentFieldName)) {
                NamedAnalyzer analyzer = parseContext.analysisService().analyzer(parser.text());
                if (analyzer == null) {
                    throw new QueryParsingException(parseContext,
                            "[query_string] analyzer [" + parser.text() + "] not found");
                }
                qpSettings.forcedAnalyzer(analyzer);
            } else if ("quote_analyzer".equals(currentFieldName) || "quoteAnalyzer".equals(currentFieldName)) {
                NamedAnalyzer analyzer = parseContext.analysisService().analyzer(parser.text());
                if (analyzer == null) {
                    throw new QueryParsingException(parseContext,
                            "[query_string] quote_analyzer [" + parser.text() + "] not found");
                }
                qpSettings.forcedQuoteAnalyzer(analyzer);
            } else if ("allow_leading_wildcard".equals(currentFieldName)
                    || "allowLeadingWildcard".equals(currentFieldName)) {
                qpSettings.allowLeadingWildcard(parser.booleanValue());
            } else if ("auto_generate_phrase_queries".equals(currentFieldName)
                    || "autoGeneratePhraseQueries".equals(currentFieldName)) {
                qpSettings.autoGeneratePhraseQueries(parser.booleanValue());
            } else if ("max_determinized_states".equals(currentFieldName)
                    || "maxDeterminizedStates".equals(currentFieldName)) {
                qpSettings.maxDeterminizedStates(parser.intValue());
            } else if ("lowercase_expanded_terms".equals(currentFieldName)
                    || "lowercaseExpandedTerms".equals(currentFieldName)) {
                qpSettings.lowercaseExpandedTerms(parser.booleanValue());
            } else if ("enable_position_increments".equals(currentFieldName)
                    || "enablePositionIncrements".equals(currentFieldName)) {
                qpSettings.enablePositionIncrements(parser.booleanValue());
            } else if ("escape".equals(currentFieldName)) {
                qpSettings.escape(parser.booleanValue());
            } else if ("use_dis_max".equals(currentFieldName) || "useDisMax".equals(currentFieldName)) {
                qpSettings.useDisMax(parser.booleanValue());
            } else if ("fuzzy_prefix_length".equals(currentFieldName)
                    || "fuzzyPrefixLength".equals(currentFieldName)) {
                qpSettings.fuzzyPrefixLength(parser.intValue());
            } else if ("fuzzy_max_expansions".equals(currentFieldName)
                    || "fuzzyMaxExpansions".equals(currentFieldName)) {
                qpSettings.fuzzyMaxExpansions(parser.intValue());
            } else if ("fuzzy_rewrite".equals(currentFieldName) || "fuzzyRewrite".equals(currentFieldName)) {
                qpSettings.fuzzyRewriteMethod(
                        QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), parser.textOrNull()));
            } else if ("phrase_slop".equals(currentFieldName) || "phraseSlop".equals(currentFieldName)) {
                qpSettings.phraseSlop(parser.intValue());
            } else if (parseContext.parseFieldMatcher().match(currentFieldName, FUZZINESS)) {
                qpSettings.setFuzziness(Fuzziness.parse(parser));
            } else if ("boost".equals(currentFieldName)) {
                qpSettings.boost(parser.floatValue());
            } else if ("tie_breaker".equals(currentFieldName) || "tieBreaker".equals(currentFieldName)) {
                qpSettings.tieBreaker(parser.floatValue());
            } else if ("analyze_wildcard".equals(currentFieldName)
                    || "analyzeWildcard".equals(currentFieldName)) {
                qpSettings.analyzeWildcard(parser.booleanValue());
            } else if ("rewrite".equals(currentFieldName)) {
                qpSettings.rewriteMethod(
                        QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), parser.textOrNull()));
            } else if ("minimum_should_match".equals(currentFieldName)
                    || "minimumShouldMatch".equals(currentFieldName)) {
                qpSettings.minimumShouldMatch(parser.textOrNull());
            } else if ("quote_field_suffix".equals(currentFieldName)
                    || "quoteFieldSuffix".equals(currentFieldName)) {
                qpSettings.quoteFieldSuffix(parser.textOrNull());
            } else if ("lenient".equalsIgnoreCase(currentFieldName)) {
                qpSettings.lenient(parser.booleanValue());
            } else if ("locale".equals(currentFieldName)) {
                String localeStr = parser.text();
                qpSettings.locale(LocaleUtils.parse(localeStr));
            } else if ("time_zone".equals(currentFieldName)) {
                // DateTimeZone.forID throws IllegalArgumentException for unknown ids;
                // translate that into a parse error with the offending value.
                try {
                    qpSettings.timeZone(DateTimeZone.forID(parser.text()));
                } catch (IllegalArgumentException e) {
                    throw new QueryParsingException(parseContext,
                            "[query_string] time_zone [" + parser.text() + "] is unknown");
                }
            } else if ("_name".equals(currentFieldName)) {
                queryName = parser.text();
            } else {
                throw new QueryParsingException(parseContext,
                        "[query_string] query does not support [" + currentFieldName + "]");
            }
        }
    }
    if (qpSettings.queryString() == null) {
        throw new QueryParsingException(parseContext, "query_string must be provided with a [query]");
    }
    qpSettings.defaultAnalyzer(parseContext.mapperService().searchAnalyzer());
    qpSettings.defaultQuoteAnalyzer(parseContext.mapperService().searchQuoteAnalyzer());

    // When "escape" is requested, escape the query string before parsing it.
    if (qpSettings.escape()) {
        qpSettings.queryString(
                org.apache.lucene.queryparser.classic.QueryParser.escape(qpSettings.queryString()));
    }

    MapperQueryParser queryParser = parseContext.queryParser(qpSettings);

    try {
        Query query = queryParser.parse(qpSettings.queryString());
        if (query == null) {
            return null;
        }
        if (qpSettings.boost() != QueryParserSettings.DEFAULT_BOOST) {
            query.setBoost(query.getBoost() * qpSettings.boost());
        }
        query = fixNegativeQueryIfNeeded(query);
        // If the coordination factor is disabled on a boolean query we don't apply the minimum should match.
        // This is done to make sure that the minimum_should_match doesn't get applied when there is only one word
        // and multiple variations of the same word in the query (synonyms for instance).
        if (query instanceof BooleanQuery && !((BooleanQuery) query).isCoordDisabled()) {
            query = Queries.applyMinimumShouldMatch((BooleanQuery) query, qpSettings.minimumShouldMatch());
        }
        if (queryName != null) {
            parseContext.addNamedQuery(queryName, query);
        }
        return query;
    } catch (org.apache.lucene.queryparser.classic.ParseException e) {
        throw new QueryParsingException(parseContext,
                "Failed to parse query [" + qpSettings.queryString() + "]", e);
    }
}

From source file:org.elasticsearch.index.query.RangeQueryParser.java

License:Apache License

/**
 * Parses a {@code range} query into a Lucene {@link Query}. Bounds may be
 * given as from/to (with include_lower/include_upper) or via the gt/gte/lt/lte
 * shorthands; date fields additionally honour "time_zone" and "format".
 * When the field has no mapping, a plain {@link TermRangeQuery} is built.
 *
 * @param parseContext the current query parse context
 * @return the parsed range query
 * @throws IOException           on content read failure
 * @throws QueryParsingException on unknown parameters or time_zone on a non-date field
 */
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
    XContentParser parser = parseContext.parser();

    String fieldName = null;
    Object from = null;
    Object to = null;
    boolean includeLower = true;
    boolean includeUpper = true;
    DateTimeZone timeZone = null;
    DateMathParser forcedDateParser = null;
    float boost = 1.0f;
    String queryName = null;

    String currentFieldName = null;
    XContentParser.Token token;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if (parseContext.isDeprecatedSetting(currentFieldName)) {
            // skip
        } else if (token == XContentParser.Token.START_OBJECT) {
            // The inner object is keyed by the field name being ranged over.
            fieldName = currentFieldName;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else {
                    if ("from".equals(currentFieldName)) {
                        from = parser.objectBytes();
                    } else if ("to".equals(currentFieldName)) {
                        to = parser.objectBytes();
                    } else if ("include_lower".equals(currentFieldName)
                            || "includeLower".equals(currentFieldName)) {
                        includeLower = parser.booleanValue();
                    } else if ("include_upper".equals(currentFieldName)
                            || "includeUpper".equals(currentFieldName)) {
                        includeUpper = parser.booleanValue();
                    } else if ("boost".equals(currentFieldName)) {
                        boost = parser.floatValue();
                    } else if ("gt".equals(currentFieldName)) {
                        // gt/lt are exclusive bounds; gte/ge and lte/le are inclusive.
                        from = parser.objectBytes();
                        includeLower = false;
                    } else if ("gte".equals(currentFieldName) || "ge".equals(currentFieldName)) {
                        from = parser.objectBytes();
                        includeLower = true;
                    } else if ("lt".equals(currentFieldName)) {
                        to = parser.objectBytes();
                        includeUpper = false;
                    } else if ("lte".equals(currentFieldName) || "le".equals(currentFieldName)) {
                        to = parser.objectBytes();
                        includeUpper = true;
                    } else if ("time_zone".equals(currentFieldName) || "timeZone".equals(currentFieldName)) {
                        timeZone = DateTimeZone.forID(parser.text());
                    } else if ("format".equals(currentFieldName)) {
                        forcedDateParser = new DateMathParser(Joda.forPattern(parser.text()));
                    } else if ("_name".equals(currentFieldName)) {
                        queryName = parser.text();
                    } else {
                        throw new QueryParsingException(parseContext,
                                "[range] query does not support [" + currentFieldName + "]");
                    }
                }
            }
        } else if (token.isValue()) {
            if (parseContext.parseFieldMatcher().match(currentFieldName, NAME_FIELD)) {
                queryName = parser.text();
            } else if (parseContext.parseFieldMatcher().match(currentFieldName, FIELDDATA_FIELD)) {
                // ignore
            } else {
                throw new QueryParsingException(parseContext,
                        "[range] query does not support [" + currentFieldName + "]");
            }
        }
    }

    Query query = null;
    MappedFieldType mapper = parseContext.fieldMapper(fieldName);
    if (mapper != null) {
        if (mapper instanceof DateFieldMapper.DateFieldType) {
            // Only date fields honour time_zone and a forced date format.
            query = ((DateFieldMapper.DateFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper,
                    timeZone, forcedDateParser);
        } else {
            if (timeZone != null) {
                throw new QueryParsingException(parseContext,
                        "[range] time_zone can not be applied to non date field [" + fieldName + "]");
            }
            //LUCENE 4 UPGRADE Mapper#rangeQuery should use bytesref as well?
            query = mapper.rangeQuery(from, to, includeLower, includeUpper);
        }
    }
    if (query == null) {
        // No mapping found for the field: fall back to a plain term-range query.
        query = new TermRangeQuery(fieldName, BytesRefs.toBytesRef(from), BytesRefs.toBytesRef(to),
                includeLower, includeUpper);
    }
    query.setBoost(boost);
    if (queryName != null) {
        parseContext.addNamedQuery(queryName, query);
    }
    return query;
}

From source file:org.elasticsearch.indices.IndicesRequestCacheIT.java

License:Apache License

/**
 * Verifies that a date-histogram aggregation with an explicit time zone is
 * stored in the request cache and that repeated cached executions return
 * buckets identical to the first (uncached) response.
 *
 * @throws Exception on any cluster/indexing failure
 */
public void testCacheAggs() throws Exception {
    assertAcked(client().admin().indices().prepareCreate("index").addMapping("type", "f", "type=date")
            .setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true).get());
    indexRandom(true, client().prepareIndex("index", "type").setSource("f", "2014-03-10T00:00:00.000Z"),
            client().prepareIndex("index", "type").setSource("f", "2014-05-13T00:00:00.000Z"));
    ensureSearchable("index");

    // This is not a random example: serialization with time zones writes shared strings
    // which used to not work well with the query cache because of the handles stream output
    // see #9500
    final SearchResponse r1 = client().prepareSearch("index").setSize(0)
            .setSearchType(SearchType.QUERY_THEN_FETCH)
            .addAggregation(dateHistogram("histo").field("f").timeZone(DateTimeZone.forID("+01:00"))
                    .minDocCount(0).dateHistogramInterval(DateHistogramInterval.MONTH))
            .get();
    assertSearchResponse(r1);

    // The cached is actually used
    assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal()
            .getRequestCache().getMemorySizeInBytes(), greaterThan(0L));

    // Re-run the identical request several times; each cached response must
    // produce the same buckets (key and doc count) as the original response.
    for (int i = 0; i < 10; ++i) {
        final SearchResponse r2 = client().prepareSearch("index").setSize(0)
                .setSearchType(SearchType.QUERY_THEN_FETCH)
                .addAggregation(dateHistogram("histo").field("f").timeZone(DateTimeZone.forID("+01:00"))
                        .minDocCount(0).dateHistogramInterval(DateHistogramInterval.MONTH))
                .get();
        assertSearchResponse(r2);
        Histogram h1 = r1.getAggregations().get("histo");
        Histogram h2 = r2.getAggregations().get("histo");
        final List<? extends Bucket> buckets1 = h1.getBuckets();
        final List<? extends Bucket> buckets2 = h2.getBuckets();
        assertEquals(buckets1.size(), buckets2.size());
        for (int j = 0; j < buckets1.size(); ++j) {
            final Bucket b1 = buckets1.get(j);
            final Bucket b2 = buckets2.get(j);
            assertEquals(b1.getKey(), b2.getKey());
            assertEquals(b1.getDocCount(), b2.getDocCount());
        }
    }
}

From source file:org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder.java

License:Apache License

protected DateHistogramValuesSourceBuilder(StreamInput in) throws IOException {
    super(in);//from   w  w w  .  j a va  2s .  c  o m
    this.interval = in.readLong();
    this.dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new);
    if (in.readBoolean()) {
        timeZone = DateTimeZone.forID(in.readString());
    }
}

From source file:org.elasticsearch.search.aggregations.bucket.DateRangeIT.java

License:Apache License

/**
 * Verifies date-math expressions in a date_range aggregation (field-based or
 * script-based source, chosen at random) with an explicit "EST" time zone:
 * three buckets are expected, with all documents landing in the middle
 * ("recently") range.
 *
 * @throws Exception on any cluster/search failure
 */
public void testDateMath() throws Exception {
    Map<String, Object> params = new HashMap<>();
    params.put("fieldname", "date");
    DateRangeAggregationBuilder rangeBuilder = dateRange("range");
    if (randomBoolean()) {
        rangeBuilder.field("date");
    } else {
        rangeBuilder.script(
                new Script(DateScriptMocks.ExtractFieldScript.NAME, ScriptType.INLINE, "native", params));
    }
    SearchResponse response = client().prepareSearch("idx")
            .addAggregation(rangeBuilder.addUnboundedTo("a long time ago", "now-50y")
                    .addRange("recently", "now-50y", "now-1y").addUnboundedFrom("last year", "now-1y")
                    .timeZone(DateTimeZone.forID("EST")))
            .execute().actionGet();

    assertSearchResponse(response);

    Range range = response.getAggregations().get("range");
    assertThat(range, notNullValue());
    assertThat(range.getName(), equalTo("range"));
    assertThat(range.getBuckets().size(), equalTo(3));

    // TODO: use diamond once JI-9019884 is fixed
    List<Range.Bucket> buckets = new ArrayList<>(range.getBuckets());

    Range.Bucket bucket = buckets.get(0);
    assertThat((String) bucket.getKey(), equalTo("a long time ago"));
    assertThat(bucket.getKeyAsString(), equalTo("a long time ago"));
    assertThat(bucket.getDocCount(), equalTo(0L));

    bucket = buckets.get(1);
    assertThat((String) bucket.getKey(), equalTo("recently"));
    assertThat(bucket.getKeyAsString(), equalTo("recently"));
    assertThat(bucket.getDocCount(), equalTo((long) numDocs));

    bucket = buckets.get(2);
    assertThat((String) bucket.getKey(), equalTo("last year"));
    assertThat(bucket.getKeyAsString(), equalTo("last year"));
    assertThat(bucket.getDocCount(), equalTo(0L));
}

From source file:org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramParser.java

License:Apache License

/**
 * Resolves a time-zone expression into a {@link DateTimeZone}. Input is either
 * a numeric offset such as "+02:30", "-02:30" or "02:30" (any value containing
 * a ':'), or otherwise a named zone id resolved via {@link DateTimeZone#forID}.
 *
 * @param text the time-zone expression to resolve
 * @return the resolved time zone
 * @throws IOException declared for caller compatibility
 * @throws IllegalArgumentException if the offset is out of range or the id is unknown
 * @throws NumberFormatException    if the offset fields are not numeric
 */
private DateTimeZone parseZone(String text) throws IOException {
    int index = text.indexOf(':');
    if (index == -1) {
        // Named zone id, listed here: http://joda-time.sourceforge.net/timezones.html
        return DateTimeZone.forID(text);
    }
    // Numeric offset like "-02:30". Track the sign separately instead of
    // parsing the signed hour field: Integer.parseInt("-00") yields 0, so the
    // old approach silently dropped the sign of offsets such as "-00:30".
    final boolean negative = text.charAt(0) == '-';
    final int beginIndex = (negative || text.charAt(0) == '+') ? 1 : 0;
    final int hours = Integer.parseInt(text.substring(beginIndex, index));
    final int minutes = Integer.parseInt(text.substring(index + 1));
    if (negative) {
        // Both components must carry the sign for Joda to combine them correctly.
        return DateTimeZone.forOffsetHoursMinutes(-hours, -minutes);
    }
    return DateTimeZone.forOffsetHoursMinutes(hours, minutes);
}

From source file:org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.java

License:Apache License

/**
 * Parses the common values-source options of an aggregation (field, script,
 * value_type, format, missing, time_zone) and delegates anything else to the
 * subclass {@code token(...)} hook via {@code otherOptions}. The collected
 * options are then applied to the factory created by {@code createFactory}.
 *
 * @param aggregationName the name of the aggregation being parsed
 * @param context         the current parse context
 * @return the configured aggregation builder
 * @throws IOException      on content read failure
 * @throws ParsingException on unexpected tokens or incompatible value types
 */
@Override
public final ValuesSourceAggregationBuilder<VS, ?> parse(String aggregationName, QueryParseContext context)
        throws IOException {

    XContentParser parser = context.parser();
    String field = null;
    Script script = null;
    ValueType valueType = null;
    String format = null;
    Object missing = null;
    DateTimeZone timezone = null;
    Map<ParseField, Object> otherOptions = new HashMap<>();

    XContentParser.Token token;
    String currentFieldName = null;
    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
        if (token == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
        } else if ("missing".equals(currentFieldName) && token.isValue()) {
            missing = parser.objectText();
        } else if (timezoneAware && context.getParseFieldMatcher().match(currentFieldName, TIME_ZONE)) {
            // time_zone accepts either a zone id string or a whole-hour offset number.
            if (token == XContentParser.Token.VALUE_STRING) {
                timezone = DateTimeZone.forID(parser.text());
            } else if (token == XContentParser.Token.VALUE_NUMBER) {
                timezone = DateTimeZone.forOffsetHours(parser.intValue());
            } else {
                throw new ParsingException(parser.getTokenLocation(), "Unexpected token " + token + " ["
                        + currentFieldName + "] in [" + aggregationName + "].");
            }
        } else if (token == XContentParser.Token.VALUE_STRING) {
            if ("field".equals(currentFieldName)) {
                field = parser.text();
            } else if (formattable && "format".equals(currentFieldName)) {
                format = parser.text();
            } else if (scriptable) {
                if ("value_type".equals(currentFieldName) || "valueType".equals(currentFieldName)) {
                    valueType = ValueType.resolveForScript(parser.text());
                    if (targetValueType != null && valueType.isNotA(targetValueType)) {
                        throw new ParsingException(parser.getTokenLocation(),
                                "Aggregation [" + aggregationName
                                        + "] was configured with an incompatible value type [" + valueType
                                        + "]. It can only work on value of type [" + targetValueType + "]");
                    }
                } else if (!token(aggregationName, currentFieldName, token, parser,
                        context.getParseFieldMatcher(), otherOptions)) {
                    throw new ParsingException(parser.getTokenLocation(), "Unexpected token " + token + " ["
                            + currentFieldName + "] in [" + aggregationName + "].");
                }
            } else if (!token(aggregationName, currentFieldName, token, parser, context.getParseFieldMatcher(),
                    otherOptions)) {
                throw new ParsingException(parser.getTokenLocation(), "Unexpected token " + token + " ["
                        + currentFieldName + "] in [" + aggregationName + "].");
            }
        } else if (scriptable && token == XContentParser.Token.START_OBJECT) {
            if (context.getParseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
                script = Script.parse(parser, context.getParseFieldMatcher());
            } else if (!token(aggregationName, currentFieldName, token, parser, context.getParseFieldMatcher(),
                    otherOptions)) {
                throw new ParsingException(parser.getTokenLocation(), "Unexpected token " + token + " ["
                        + currentFieldName + "] in [" + aggregationName + "].");
            }
        } else if (!token(aggregationName, currentFieldName, token, parser, context.getParseFieldMatcher(),
                otherOptions)) {
            throw new ParsingException(parser.getTokenLocation(),
                    "Unexpected token " + token + " [" + currentFieldName + "] in [" + aggregationName + "].");
        }
    }

    // Apply only the options that were actually provided in the request.
    ValuesSourceAggregationBuilder<VS, ?> factory = createFactory(aggregationName, this.valuesSourceType,
            this.targetValueType, otherOptions);
    if (field != null) {
        factory.field(field);
    }
    if (script != null) {
        factory.script(script);
    }
    if (valueType != null) {
        factory.valueType(valueType);
    }
    if (format != null) {
        factory.format(format);
    }
    if (missing != null) {
        factory.missing(missing);
    }
    if (timezone != null) {
        factory.timeZone(timezone);
    }
    return factory;
}

From source file:org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder.java

License:Apache License

/**
 * Deserializes a values-source aggregator builder from a stream.
 * NOTE(review): the read order must mirror the corresponding write method —
 * do not reorder these reads. Optional values are preceded by a boolean
 * presence flag on the wire.
 *
 * @param name the aggregation name
 * @param in   the stream to read from
 * @return the deserialized builder
 * @throws IOException on stream read failure
 */
@SuppressWarnings("unchecked")
@Override
protected final AB doReadFrom(String name, StreamInput in) throws IOException {
    ValuesSourceType valuesSourceType = ValuesSourceType.ANY.readFrom(in);
    ValueType targetValueType = null;
    if (in.readBoolean()) {
        targetValueType = ValueType.STRING.readFrom(in);
    }
    ValuesSourceAggregatorBuilder<VS, AB> factory = innerReadFrom(name, valuesSourceType, targetValueType, in);
    factory.field = in.readOptionalString();
    if (in.readBoolean()) {
        factory.script = Script.readScript(in);
    }
    if (in.readBoolean()) {
        factory.valueType = ValueType.STRING.readFrom(in);
    }
    factory.format = in.readOptionalString();
    factory.missing = in.readGenericValue();
    if (in.readBoolean()) {
        // Time zone is serialized as its Joda id string.
        factory.timeZone = DateTimeZone.forID(in.readString());
    }
    return (AB) factory;
}

From source file:org.elasticsearch.search.aggregations.support.ValuesSourceParser.java

License:Apache License

/**
 * Attempts to consume one token of a values-source aggregation definition
 * (missing, field, format, time_zone, value_type, script/params). Returns
 * {@code true} when the token was recognized and consumed, {@code false} when
 * the caller should handle it (or report it as unsupported).
 *
 * @param currentFieldName the field name the token belongs to
 * @param token            the current parser token
 * @param parser           the content parser
 * @return {@code true} if the token was consumed, {@code false} otherwise
 * @throws IOException on content read failure
 */
public boolean token(String currentFieldName, XContentParser.Token token, XContentParser parser)
        throws IOException {
    if ("missing".equals(currentFieldName) && token.isValue()) {
        input.missing = parser.objectText();
        return true;
    }
    if (token == XContentParser.Token.VALUE_STRING) {
        if ("field".equals(currentFieldName)) {
            input.field = parser.text();
        } else if (formattable && "format".equals(currentFieldName)) {
            input.format = parser.text();
        } else if (timezoneAware && context.parseFieldMatcher().match(currentFieldName, TIME_ZONE)) {
            input.timezone = DateTimeZone.forID(parser.text());
        } else if (scriptable) {
            if ("value_type".equals(currentFieldName) || "valueType".equals(currentFieldName)) {
                input.valueType = ValueType.resolveForScript(parser.text());
                if (targetValueType != null && input.valueType.isNotA(targetValueType)) {
                    throw new SearchParseException(context,
                            aggType.name() + " aggregation [" + aggName
                                    + "] was configured with an incompatible value type [" + input.valueType
                                    + "]. [" + aggType + "] aggregation can only work on value of type ["
                                    + targetValueType + "]",
                            parser.getTokenLocation());
                }
            } else if (!scriptParameterParser.token(currentFieldName, token, parser,
                    context.parseFieldMatcher())) {
                return false;
            }
            return true;
        } else {
            return false;
        }
        return true;
    }
    if (token == XContentParser.Token.VALUE_NUMBER) {
        // A numeric time_zone is interpreted as a whole-hour UTC offset.
        if (timezoneAware && context.parseFieldMatcher().match(currentFieldName, TIME_ZONE)) {
            input.timezone = DateTimeZone.forOffsetHours(parser.intValue());
        } else {
            return false;
        }
        return true;
    }
    if (scriptable && token == XContentParser.Token.START_OBJECT) {
        if (context.parseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
            input.script = Script.parse(parser, context.parseFieldMatcher());
            return true;
        } else if ("params".equals(currentFieldName)) {
            input.params = parser.map();
            return true;
        }
        return false;
    }

    return false;
}