Example usage for org.apache.lucene.index CompositeReaderContext leaves

Introduction

This page collects example usages of org.apache.lucene.index.CompositeReaderContext.leaves(), which returns the list of per-segment (leaf) reader contexts of a composite reader.

Prototype

public List<AtomicReaderContext> leaves()

(Lucene 4.x signature; in Lucene 5 and later the leaf type is LeafReaderContext, as in some of the examples below.)

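leaves() yields one leaf context per index segment. The following is a minimal, self-contained sketch against the Lucene 4.x API used by most of the examples below; the index path is a placeholder, not taken from any example on this page.

import java.io.File;
import java.util.List;

import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.CompositeReaderContext;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class LeavesExample {
    public static void main(String[] args) throws Exception {
        // "/path/to/index" is a placeholder directory for illustration only.
        Directory directory = FSDirectory.open(new File("/path/to/index"));
        DirectoryReader reader = DirectoryReader.open(directory);
        try {
            // getContext() on a composite reader returns a CompositeReaderContext.
            CompositeReaderContext composite = reader.getContext();
            // One AtomicReaderContext per index segment.
            List<AtomicReaderContext> leaves = composite.leaves();
            for (AtomicReaderContext leaf : leaves) {
                AtomicReader segmentReader = leaf.reader();
                System.out.println("docBase=" + leaf.docBase + " maxDoc=" + segmentReader.maxDoc());
            }
        } finally {
            reader.close();
        }
    }
}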

Usage

From source file: org.apache.blur.manager.writer.IndexImporter.java

License: Apache License

private void applyDeletes(Directory directory, IndexWriter indexWriter, String shard, boolean emitDeletes)
        throws IOException {
    DirectoryReader reader = DirectoryReader.open(directory);
    try {
        LOG.info("Applying deletes in reader [{0}]", reader);
        CompositeReaderContext compositeReaderContext = reader.getContext();
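        // leaves() exposes one AtomicReaderContext per index segment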
        List<AtomicReaderContext> leaves = compositeReaderContext.leaves();
        BlurPartitioner blurPartitioner = new BlurPartitioner();
        Text key = new Text();
        int numberOfShards = _shardContext.getTableContext().getDescriptor().getShardCount();
        int shardId = ShardUtil.getShardIndex(shard);
        for (AtomicReaderContext context : leaves) {
            AtomicReader atomicReader = context.reader();
            Fields fields = atomicReader.fields();
            Terms terms = fields.terms(BlurConstants.ROW_ID);
            if (terms != null) {
                TermsEnum termsEnum = terms.iterator(null);
                BytesRef ref = null;
                while ((ref = termsEnum.next()) != null) {
                    key.set(ref.bytes, ref.offset, ref.length);
                    int partition = blurPartitioner.getPartition(key, null, numberOfShards);
                    if (shardId != partition) {
                        throw new IOException("Index is corrupted, RowIds are found in wrong shard, partition ["
                                + partition + "] does not match shard [" + shardId
                                + "], this can happen when rows are not hashed correctly.");
                    }
                    if (emitDeletes) {
                        indexWriter.deleteDocuments(new Term(BlurConstants.ROW_ID, BytesRef.deepCopyOf(ref)));
                    }
                }
            }
        }
    } finally {
        reader.close();
    }
}

From source file: org.elasticsearch.index.fielddata.DuelFieldDataTests.java

License: Apache License

@Test
public void testDuelAllTypesSingleValue() throws Exception {
    final String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("bytes").field("type", "string")
            .field("index", "not_analyzed").startObject("fielddata")
            .field("format", LuceneTestCase.defaultCodecSupportsSortedSet() ? "doc_values" : "fst").endObject()
            .endObject().startObject("byte").field("type", "byte").startObject("fielddata")
            .field("format", "doc_values").endObject().endObject().startObject("short").field("type", "short")
            .startObject("fielddata").field("format", "doc_values").endObject().endObject()
            .startObject("integer").field("type", "integer").startObject("fielddata")
            .field("format", "doc_values").endObject().endObject().startObject("long").field("type", "long")
            .startObject("fielddata").field("format", "doc_values").endObject().endObject().startObject("float")
            .field("type", "float").startObject("fielddata").field("format", "doc_values").endObject()
            .endObject().startObject("double").field("type", "double").startObject("fielddata")
            .field("format", "doc_values").endObject().endObject().endObject().endObject().endObject().string();
    final DocumentMapper mapper = MapperTestUtils.newParser().parse(mapping);
    Random random = getRandom();
    int atLeast = atLeast(random, 1000);
    for (int i = 0; i < atLeast; i++) {
        String s = Integer.toString(randomByte());

        XContentBuilder doc = XContentFactory.jsonBuilder().startObject();
        for (String fieldName : Arrays.asList("bytes", "byte", "short", "integer", "long", "float", "double")) {
            doc = doc.field(fieldName, s);
        }

        doc = doc.endObject();

        final ParsedDocument d = mapper.parse("type", Integer.toString(i), doc.bytes());

        writer.addDocument(d.rootDoc());

        if (random.nextInt(10) == 0) {
            refreshReader();
        }
    }
    AtomicReaderContext context = refreshReader();
    Map<FieldDataType, Type> typeMap = new HashMap<FieldDataType, DuelFieldDataTests.Type>();
    typeMap.put(new FieldDataType("string", ImmutableSettings.builder().put("format", "fst")), Type.Bytes);
    typeMap.put(new FieldDataType("string", ImmutableSettings.builder().put("format", "paged_bytes")),
            Type.Bytes);
    typeMap.put(new FieldDataType("byte", ImmutableSettings.builder().put("format", "array")), Type.Integer);
    typeMap.put(new FieldDataType("short", ImmutableSettings.builder().put("format", "array")), Type.Integer);
    typeMap.put(new FieldDataType("int", ImmutableSettings.builder().put("format", "array")), Type.Integer);
    typeMap.put(new FieldDataType("long", ImmutableSettings.builder().put("format", "array")), Type.Long);
    typeMap.put(new FieldDataType("double", ImmutableSettings.builder().put("format", "array")), Type.Double);
    typeMap.put(new FieldDataType("float", ImmutableSettings.builder().put("format", "array")), Type.Float);
    typeMap.put(new FieldDataType("byte", ImmutableSettings.builder().put("format", "doc_values")),
            Type.Integer);
    typeMap.put(new FieldDataType("short", ImmutableSettings.builder().put("format", "doc_values")),
            Type.Integer);
    typeMap.put(new FieldDataType("int", ImmutableSettings.builder().put("format", "doc_values")),
            Type.Integer);
    typeMap.put(new FieldDataType("long", ImmutableSettings.builder().put("format", "doc_values")), Type.Long);
    typeMap.put(new FieldDataType("double", ImmutableSettings.builder().put("format", "doc_values")),
            Type.Double);
    typeMap.put(new FieldDataType("float", ImmutableSettings.builder().put("format", "doc_values")),
            Type.Float);
    if (LuceneTestCase.defaultCodecSupportsSortedSet()) {
        typeMap.put(new FieldDataType("string", ImmutableSettings.builder().put("format", "doc_values")),
                Type.Bytes);
    }
    ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<Entry<FieldDataType, Type>>(typeMap.entrySet());
    Preprocessor pre = new ToDoublePreprocessor();
    while (!list.isEmpty()) {
        Entry<FieldDataType, Type> left;
        Entry<FieldDataType, Type> right;
        if (list.size() > 1) {
            left = list.remove(random.nextInt(list.size()));
            right = list.remove(random.nextInt(list.size()));
        } else {
            right = left = list.remove(0);
        }

        ifdService.clear();
        IndexFieldData<?> leftFieldData = getForField(left.getKey(),
                left.getValue().name().toLowerCase(Locale.ROOT));
        ifdService.clear();
        IndexFieldData<?> rightFieldData = getForField(right.getKey(),
                right.getValue().name().toLowerCase(Locale.ROOT));
        duelFieldDataBytes(random, context, leftFieldData, rightFieldData, pre);
        duelFieldDataBytes(random, context, rightFieldData, leftFieldData, pre);

        DirectoryReader perSegment = DirectoryReader.open(writer, true);
        CompositeReaderContext composite = perSegment.getContext();
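        // repeat the duel on each per-segment leaf context individually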
        List<AtomicReaderContext> leaves = composite.leaves();
        for (AtomicReaderContext atomicReaderContext : leaves) {
            duelFieldDataBytes(random, atomicReaderContext, leftFieldData, rightFieldData, pre);
        }
    }
}

From source file: org.elasticsearch.index.fielddata.DuelFieldDataTests.java

License: Apache License

@Test
public void testDuelIntegers() throws Exception {
    final String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("byte").field("type", "byte").startObject("fielddata")
            .field("format", "doc_values").endObject().endObject().startObject("short").field("type", "short")
            .startObject("fielddata").field("format", "doc_values").endObject().endObject()
            .startObject("integer").field("type", "integer").startObject("fielddata")
            .field("format", "doc_values").endObject().endObject().startObject("long").field("type", "long")
            .startObject("fielddata").field("format", "doc_values").endObject().endObject().endObject()
            .endObject().endObject().string();

    final DocumentMapper mapper = MapperTestUtils.newParser().parse(mapping);
    Random random = getRandom();
    int atLeast = atLeast(random, 1000);
    final int maxNumValues = randomBoolean() ? 1 : randomIntBetween(2, 40);
    byte[] values = new byte[maxNumValues];
    for (int i = 0; i < atLeast; i++) {
        final int numValues = randomInt(maxNumValues);
        for (int j = 0; j < numValues; ++j) {
            if (randomBoolean()) {
                values[j] = 1; // test deduplication
            } else {
                values[j] = randomByte();
            }
        }

        XContentBuilder doc = XContentFactory.jsonBuilder().startObject();
        for (String fieldName : Arrays.asList("byte", "short", "integer", "long")) {
            doc = doc.startArray(fieldName);
            for (int j = 0; j < numValues; ++j) {
                doc = doc.value(values[j]);
            }
            doc = doc.endArray();
        }
        doc = doc.endObject();

        final ParsedDocument d = mapper.parse("type", Integer.toString(i), doc.bytes());

        writer.addDocument(d.rootDoc());
        if (random.nextInt(10) == 0) {
            refreshReader();
        }
    }
    AtomicReaderContext context = refreshReader();
    Map<FieldDataType, Type> typeMap = new HashMap<FieldDataType, Type>();
    typeMap.put(new FieldDataType("byte", ImmutableSettings.builder().put("format", "array")), Type.Integer);
    typeMap.put(new FieldDataType("short", ImmutableSettings.builder().put("format", "array")), Type.Integer);
    typeMap.put(new FieldDataType("int", ImmutableSettings.builder().put("format", "array")), Type.Integer);
    typeMap.put(new FieldDataType("long", ImmutableSettings.builder().put("format", "array")), Type.Long);
    typeMap.put(new FieldDataType("byte", ImmutableSettings.builder().put("format", "doc_values")),
            Type.Integer);
    typeMap.put(new FieldDataType("short", ImmutableSettings.builder().put("format", "doc_values")),
            Type.Integer);
    typeMap.put(new FieldDataType("int", ImmutableSettings.builder().put("format", "doc_values")),
            Type.Integer);
    typeMap.put(new FieldDataType("long", ImmutableSettings.builder().put("format", "doc_values")), Type.Long);
    ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<Entry<FieldDataType, Type>>(typeMap.entrySet());
    while (!list.isEmpty()) {
        Entry<FieldDataType, Type> left;
        Entry<FieldDataType, Type> right;
        if (list.size() > 1) {
            left = list.remove(random.nextInt(list.size()));
            right = list.remove(random.nextInt(list.size()));
        } else {
            right = left = list.remove(0);
        }
        ifdService.clear();
        IndexNumericFieldData<?> leftFieldData = getForField(left.getKey(),
                left.getValue().name().toLowerCase(Locale.ROOT));
        ifdService.clear();
        IndexNumericFieldData<?> rightFieldData = getForField(right.getKey(),
                right.getValue().name().toLowerCase(Locale.ROOT));

        duelFieldDataLong(random, context, leftFieldData, rightFieldData);
        duelFieldDataLong(random, context, rightFieldData, leftFieldData);

        DirectoryReader perSegment = DirectoryReader.open(writer, true);
        CompositeReaderContext composite = perSegment.getContext();
        List<AtomicReaderContext> leaves = composite.leaves();
        for (AtomicReaderContext atomicReaderContext : leaves) {
            duelFieldDataLong(random, atomicReaderContext, leftFieldData, rightFieldData);
        }
    }

}

From source file: org.elasticsearch.index.fielddata.DuelFieldDataTests.java

License: Apache License

@Test
public void testDuelDoubles() throws Exception {
    final String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("float").field("type", "float").startObject("fielddata")
            .field("format", "doc_values").endObject().endObject().startObject("double").field("type", "double")
            .startObject("fielddata").field("format", "doc_values").endObject().endObject().endObject()
            .endObject().endObject().string();

    final DocumentMapper mapper = MapperTestUtils.newParser().parse(mapping);
    Random random = getRandom();
    int atLeast = atLeast(random, 1000);
    final int maxNumValues = randomBoolean() ? 1 : randomIntBetween(2, 40);
    float[] values = new float[maxNumValues];
    for (int i = 0; i < atLeast; i++) {
        final int numValues = randomInt(maxNumValues);
        float def = randomBoolean() ? randomFloat() : Float.NaN;
        for (int j = 0; j < numValues; ++j) {
            if (randomBoolean()) {
                values[j] = def;
            } else {
                values[j] = randomFloat();
            }
        }

        XContentBuilder doc = XContentFactory.jsonBuilder().startObject().startArray("float");
        for (int j = 0; j < numValues; ++j) {
            doc = doc.value(values[j]);
        }
        doc = doc.endArray().startArray("double");
        for (int j = 0; j < numValues; ++j) {
            doc = doc.value(values[j]);
        }
        doc = doc.endArray().endObject();

        final ParsedDocument d = mapper.parse("type", Integer.toString(i), doc.bytes());

        writer.addDocument(d.rootDoc());
        if (random.nextInt(10) == 0) {
            refreshReader();
        }
    }
    AtomicReaderContext context = refreshReader();
    Map<FieldDataType, Type> typeMap = new HashMap<FieldDataType, Type>();
    typeMap.put(new FieldDataType("double", ImmutableSettings.builder().put("format", "array")), Type.Double);
    typeMap.put(new FieldDataType("float", ImmutableSettings.builder().put("format", "array")), Type.Float);
    typeMap.put(new FieldDataType("double", ImmutableSettings.builder().put("format", "doc_values")),
            Type.Double);
    typeMap.put(new FieldDataType("float", ImmutableSettings.builder().put("format", "doc_values")),
            Type.Float);
    ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<Entry<FieldDataType, Type>>(typeMap.entrySet());
    while (!list.isEmpty()) {
        Entry<FieldDataType, Type> left;
        Entry<FieldDataType, Type> right;
        if (list.size() > 1) {
            left = list.remove(random.nextInt(list.size()));
            right = list.remove(random.nextInt(list.size()));
        } else {
            right = left = list.remove(0);
        }
        ifdService.clear();
        IndexNumericFieldData<?> leftFieldData = getForField(left.getKey(),
                left.getValue().name().toLowerCase(Locale.ROOT));

        ifdService.clear();
        IndexNumericFieldData<?> rightFieldData = getForField(right.getKey(),
                right.getValue().name().toLowerCase(Locale.ROOT));

        assertOrder(left.getValue().order(), leftFieldData, context);
        assertOrder(right.getValue().order(), rightFieldData, context);
        duelFieldDataDouble(random, context, leftFieldData, rightFieldData);
        duelFieldDataDouble(random, context, rightFieldData, leftFieldData);

        DirectoryReader perSegment = DirectoryReader.open(writer, true);
        CompositeReaderContext composite = perSegment.getContext();
        List<AtomicReaderContext> leaves = composite.leaves();
        for (AtomicReaderContext atomicReaderContext : leaves) {
            duelFieldDataDouble(random, atomicReaderContext, leftFieldData, rightFieldData);
        }
    }

}

From source file: org.elasticsearch.index.fielddata.DuelFieldDataTests.java

License: Apache License

@Test
public void testDuelStrings() throws Exception {
    Random random = getRandom();
    int atLeast = atLeast(random, 1000);
    for (int i = 0; i < atLeast; i++) {
        Document d = new Document();
        d.add(new StringField("_id", "" + i, Field.Store.NO));
        if (random.nextInt(15) != 0) {
            int[] numbers = getNumbers(random, Integer.MAX_VALUE);
            for (int j : numbers) {
                final String s = English.longToEnglish(j);
                d.add(new StringField("bytes", s, Field.Store.NO));
                if (LuceneTestCase.defaultCodecSupportsSortedSet()) {
                    d.add(new SortedSetDocValuesField("bytes", new BytesRef(s)));
                }
            }
            if (random.nextInt(10) == 0) {
                d.add(new StringField("bytes", "", Field.Store.NO));
                if (LuceneTestCase.defaultCodecSupportsSortedSet()) {
                    d.add(new SortedSetDocValuesField("bytes", new BytesRef()));
                }
            }
        }
        writer.addDocument(d);
        if (random.nextInt(10) == 0) {
            refreshReader();
        }
    }
    AtomicReaderContext context = refreshReader();
    Map<FieldDataType, Type> typeMap = new HashMap<FieldDataType, DuelFieldDataTests.Type>();
    typeMap.put(new FieldDataType("string", ImmutableSettings.builder().put("format", "fst")), Type.Bytes);
    typeMap.put(new FieldDataType("string", ImmutableSettings.builder().put("format", "paged_bytes")),
            Type.Bytes);
    if (LuceneTestCase.defaultCodecSupportsSortedSet()) {
        typeMap.put(new FieldDataType("string", ImmutableSettings.builder().put("format", "doc_values")),
                Type.Bytes);
    }
    // TODO add filters
    ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<Entry<FieldDataType, Type>>(typeMap.entrySet());
    Preprocessor pre = new Preprocessor();
    while (!list.isEmpty()) {
        Entry<FieldDataType, Type> left;
        Entry<FieldDataType, Type> right;
        if (list.size() > 1) {
            left = list.remove(random.nextInt(list.size()));
            right = list.remove(random.nextInt(list.size()));
        } else {
            right = left = list.remove(0);
        }
        ifdService.clear();
        IndexFieldData<?> leftFieldData = getForField(left.getKey(),
                left.getValue().name().toLowerCase(Locale.ROOT));

        ifdService.clear();
        IndexFieldData<?> rightFieldData = getForField(right.getKey(),
                right.getValue().name().toLowerCase(Locale.ROOT));

        duelFieldDataBytes(random, context, leftFieldData, rightFieldData, pre);
        duelFieldDataBytes(random, context, rightFieldData, leftFieldData, pre);

        DirectoryReader perSegment = DirectoryReader.open(writer, true);
        CompositeReaderContext composite = perSegment.getContext();
        List<AtomicReaderContext> leaves = composite.leaves();
        for (AtomicReaderContext atomicReaderContext : leaves) {
            assertOrder(AtomicFieldData.Order.BYTES, leftFieldData, atomicReaderContext);
            assertOrder(AtomicFieldData.Order.BYTES, rightFieldData, atomicReaderContext);
            duelFieldDataBytes(random, atomicReaderContext, leftFieldData, rightFieldData, pre);
        }
        perSegment.close();
    }

}

From source file: org.elasticsearch.index.fielddata.DuelFieldDataTests.java

License: Apache License

public void testDuelGeoPoints() throws Exception {
    final String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("geopoint").field("type", "geo_point")
            .startObject("fielddata").field("format", "doc_values").endObject().endObject().endObject()
            .endObject().endObject().string();

    final DocumentMapper mapper = MapperTestUtils.newParser().parse(mapping);

    Random random = getRandom();
    int atLeast = atLeast(random, 1000);
    int maxValuesPerDoc = randomBoolean() ? 1 : randomIntBetween(2, 40);
    // to test deduplication
    double defaultLat = randomDouble() * 180 - 90;
    double defaultLon = randomDouble() * 360 - 180;
    for (int i = 0; i < atLeast; i++) {
        final int numValues = randomInt(maxValuesPerDoc);
        XContentBuilder doc = XContentFactory.jsonBuilder().startObject().startArray("geopoint");
        for (int j = 0; j < numValues; ++j) {
            if (randomBoolean()) {
                doc.startObject().field("lat", defaultLat).field("lon", defaultLon).endObject();
            } else {
                doc.startObject().field("lat", randomDouble() * 180 - 90)
                        .field("lon", randomDouble() * 360 - 180).endObject();
            }
        }
        doc = doc.endArray().endObject();
        final ParsedDocument d = mapper.parse("type", Integer.toString(i), doc.bytes());

        writer.addDocument(d.rootDoc());
        if (random.nextInt(10) == 0) {
            refreshReader();
        }
    }
    AtomicReaderContext context = refreshReader();
    Map<FieldDataType, Type> typeMap = new HashMap<FieldDataType, DuelFieldDataTests.Type>();
    final Distance precision = new Distance(1, randomFrom(DistanceUnit.values()));
    typeMap.put(new FieldDataType("geo_point", ImmutableSettings.builder().put("format", "array")),
            Type.GeoPoint);
    typeMap.put(
            new FieldDataType("geo_point",
                    ImmutableSettings.builder().put("format", "compressed").put("precision", precision)),
            Type.GeoPoint);
    typeMap.put(new FieldDataType("geo_point", ImmutableSettings.builder().put("format", "doc_values")),
            Type.GeoPoint);

    ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<Entry<FieldDataType, Type>>(typeMap.entrySet());
    while (!list.isEmpty()) {
        Entry<FieldDataType, Type> left;
        Entry<FieldDataType, Type> right;
        if (list.size() > 1) {
            left = list.remove(random.nextInt(list.size()));
            right = list.remove(random.nextInt(list.size()));
        } else {
            right = left = list.remove(0);
        }
        ifdService.clear();
        IndexGeoPointFieldData<?> leftFieldData = getForField(left.getKey(),
                left.getValue().name().toLowerCase(Locale.ROOT));

        ifdService.clear();
        IndexGeoPointFieldData<?> rightFieldData = getForField(right.getKey(),
                right.getValue().name().toLowerCase(Locale.ROOT));

        duelFieldDataGeoPoint(random, context, leftFieldData, rightFieldData, precision);
        duelFieldDataGeoPoint(random, context, rightFieldData, leftFieldData, precision);

        DirectoryReader perSegment = DirectoryReader.open(writer, true);
        CompositeReaderContext composite = perSegment.getContext();
        List<AtomicReaderContext> leaves = composite.leaves();
        for (AtomicReaderContext atomicReaderContext : leaves) {
            duelFieldDataGeoPoint(random, atomicReaderContext, leftFieldData, rightFieldData, precision);
        }
        perSegment.close();
    }
}

From source file: org.elasticsearch.index.shard.ShardUtilsTests.java

License: Apache License

public void testExtractShardId() throws IOException {
    BaseDirectoryWrapper dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
    writer.commit();
    ShardId id = new ShardId("foo", random().nextInt());
    try (DirectoryReader reader = DirectoryReader.open(writer, random().nextBoolean())) {
        ElasticsearchDirectoryReader wrap = ElasticsearchDirectoryReader.wrap(reader, id);
        assertEquals(id, ShardUtils.extractShardId(wrap));
    }
    final int numDocs = 1 + random().nextInt(5);
    for (int i = 0; i < numDocs; i++) {
        Document d = new Document();
        d.add(newField("name", "foobar", StringField.TYPE_STORED));
        writer.addDocument(d);
        if (random().nextBoolean()) {
            writer.commit();
        }
    }

    try (DirectoryReader reader = DirectoryReader.open(writer, random().nextBoolean())) {
        ElasticsearchDirectoryReader wrap = ElasticsearchDirectoryReader.wrap(reader, id);
        assertEquals(id, ShardUtils.extractShardId(wrap));
        CompositeReaderContext context = wrap.getContext();
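        // every leaf (segment) reader must yield the same shard id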
        for (LeafReaderContext leaf : context.leaves()) {
            assertEquals(id, ShardUtils.extractShardId(leaf.reader()));
        }
    }
    IOUtils.close(writer, dir);
}

From source file: org.elasticsearch.search.aggregations.AggregatorTestCase.java

License: Apache License

/**
 * Divides the provided {@link IndexSearcher} into sub-searchers, one for each segment,
 * builds an aggregator for each sub-searcher filtered by the provided {@link Query} and
 * returns the reduced {@link InternalAggregation}.
 */
protected <A extends InternalAggregation, C extends Aggregator> A searchAndReduce(IndexSearcher searcher,
        Query query, AggregationBuilder builder, MappedFieldType... fieldTypes) throws IOException {
    final IndexReaderContext ctx = searcher.getTopReaderContext();

    final ShardSearcher[] subSearchers;
    if (ctx instanceof LeafReaderContext) {
        subSearchers = new ShardSearcher[1];
        subSearchers[0] = new ShardSearcher((LeafReaderContext) ctx, ctx);
    } else {
        final CompositeReaderContext compCTX = (CompositeReaderContext) ctx;
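        // build one ShardSearcher per leaf of the composite context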
        final int size = compCTX.leaves().size();
        subSearchers = new ShardSearcher[size];
        for (int searcherIDX = 0; searcherIDX < subSearchers.length; searcherIDX++) {
            final LeafReaderContext leave = compCTX.leaves().get(searcherIDX);
            subSearchers[searcherIDX] = new ShardSearcher(leave, compCTX);
        }
    }

    List<InternalAggregation> aggs = new ArrayList<>();
    Query rewritten = searcher.rewrite(query);
    Weight weight = searcher.createWeight(rewritten, true);
    C root = createAggregator(builder, searcher, fieldTypes);
    try {
        for (ShardSearcher subSearcher : subSearchers) {
            C a = createAggregator(builder, subSearcher, fieldTypes);
            a.preCollection();
            subSearcher.search(weight, a);
            a.postCollection();
            aggs.add(a.buildAggregation(0L));
        }
        if (aggs.isEmpty()) {
            return null;
        } else {
            if (randomBoolean()) {
                // sometimes do an incremental reduce
                List<InternalAggregation> internalAggregations = randomSubsetOf(
                        randomIntBetween(1, aggs.size()), aggs);
                A internalAgg = (A) aggs.get(0).doReduce(internalAggregations,
                        new InternalAggregation.ReduceContext(root.context().bigArrays(), null, false));
                aggs.removeAll(internalAggregations);
                aggs.add(internalAgg);
            }
            // now do the final reduce
            @SuppressWarnings("unchecked")
            A internalAgg = (A) aggs.get(0).doReduce(aggs,
                    new InternalAggregation.ReduceContext(root.context().bigArrays(), null, true));
            return internalAgg;
        }
    } finally {
        Releasables.close(releasables);
        releasables.clear();
    }
}

From source file: org.elasticsearch.test.unit.index.fielddata.DuellFieldDataTest.java

License: Apache License

@Test
public void testDuellAllTypesSingleValue() throws Exception {
    long seed = System.currentTimeMillis();
    System.out.println("SEED[testDuellAllTypesSingleValue]: " + seed);
    Random random = new Random(seed);
    int atLeast = atLeast(random, 1000);
    for (int i = 0; i < atLeast; i++) {
        int v = (random.nextBoolean() ? -1 * random.nextInt(Byte.MAX_VALUE) : random.nextInt(Byte.MAX_VALUE));
        Document d = new Document();
        d.add(new StringField("_id", "" + i, Field.Store.NO));
        if (random.nextInt(15) != 0) {
            d.add(new LongField("long", v, Field.Store.NO));
            d.add(new IntField("integer", v, Field.Store.NO));
            d.add(new DoubleField("double", v, Field.Store.NO));
            d.add(new FloatField("float", v, Field.Store.NO));
            d.add(new StringField("bytes", "" + v, Field.Store.NO));
        }
        writer.addDocument(d);
        if (random.nextInt(10) == 0) {
            refreshReader();
        }
    }
    AtomicReaderContext context = refreshReader();
    Map<FieldDataType, Type> typeMap = new HashMap<FieldDataType, DuellFieldDataTest.Type>();
    typeMap.put(new FieldDataType("string", ImmutableSettings.builder().put("format", "fst")), Type.Bytes);
    typeMap.put(new FieldDataType("string", ImmutableSettings.builder().put("format", "paged_bytes")),
            Type.Bytes);
    typeMap.put(new FieldDataType("string", ImmutableSettings.builder().put("format", "concrete_bytes")),
            Type.Bytes);
    typeMap.put(new FieldDataType("byte"), Type.Integer);
    typeMap.put(new FieldDataType("short"), Type.Integer);
    typeMap.put(new FieldDataType("int"), Type.Integer);
    typeMap.put(new FieldDataType("long"), Type.Long);
    typeMap.put(new FieldDataType("double"), Type.Double);
    typeMap.put(new FieldDataType("float"), Type.Float);
    ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<Entry<FieldDataType, Type>>(typeMap.entrySet());
    Preprocessor pre = new ToDoublePreprocessor();
    while (!list.isEmpty()) {
        Entry<FieldDataType, Type> left;
        Entry<FieldDataType, Type> right;
        if (list.size() > 1) {
            left = list.remove(random.nextInt(list.size()));
            right = list.remove(random.nextInt(list.size()));
        } else {
            right = left = list.remove(0);
        }
        ifdService.clear();
        IndexFieldData leftFieldData = ifdService
                .getForField(new FieldMapper.Names(left.getValue().name().toLowerCase()), left.getKey());
        ifdService.clear();
        IndexFieldData rightFieldData = ifdService
                .getForField(new FieldMapper.Names(right.getValue().name().toLowerCase()), right.getKey());
        duellFieldDataBytes(random, context, leftFieldData, rightFieldData, pre);
        duellFieldDataBytes(random, context, rightFieldData, leftFieldData, pre);

        DirectoryReader perSegment = DirectoryReader.open(writer, true);
        CompositeReaderContext composite = perSegment.getContext();
        List<AtomicReaderContext> leaves = composite.leaves();
        for (AtomicReaderContext atomicReaderContext : leaves) {
            duellFieldDataBytes(random, atomicReaderContext, leftFieldData, rightFieldData, pre);
        }
    }
}

From source file: org.elasticsearch.test.unit.index.fielddata.DuellFieldDataTest.java

License: Apache License

@Test
public void testDuellIntegers() throws Exception {
    long seed = System.currentTimeMillis();
    System.out.println("SEED[testDuellIntegers]: " + seed);
    Random random = new Random(seed);
    int atLeast = atLeast(random, 1000);
    for (int i = 0; i < atLeast; i++) {
        Document d = new Document();
        d.add(new StringField("_id", "" + i, Field.Store.NO));
        if (random.nextInt(15) != 0) {
            int[] numbers = getNumbers(random, Byte.MAX_VALUE);
            for (int j : numbers) {
                d.add(new LongField("long", j, Field.Store.NO));
                d.add(new IntField("integer", j, Field.Store.NO));
            }
        }
        writer.addDocument(d);
        if (random.nextInt(10) == 0) {
            refreshReader();
        }
    }
    AtomicReaderContext context = refreshReader();
    Map<FieldDataType, Type> typeMap = new HashMap<FieldDataType, DuellFieldDataTest.Type>();
    typeMap.put(new FieldDataType("byte"), Type.Integer);
    typeMap.put(new FieldDataType("short"), Type.Integer);
    typeMap.put(new FieldDataType("int"), Type.Integer);
    typeMap.put(new FieldDataType("long"), Type.Long);
    ArrayList<Entry<FieldDataType, Type>> list = new ArrayList<Entry<FieldDataType, Type>>(typeMap.entrySet());
    while (!list.isEmpty()) {
        Entry<FieldDataType, Type> left;
        Entry<FieldDataType, Type> right;
        if (list.size() > 1) {
            left = list.remove(random.nextInt(list.size()));
            right = list.remove(random.nextInt(list.size()));
        } else {
            right = left = list.remove(0);
        }
        ifdService.clear();
        IndexNumericFieldData leftFieldData = ifdService
                .getForField(new FieldMapper.Names(left.getValue().name().toLowerCase()), left.getKey());
        ifdService.clear();
        IndexNumericFieldData rightFieldData = ifdService
                .getForField(new FieldMapper.Names(right.getValue().name().toLowerCase()), right.getKey());
        duellFieldDataLong(random, context, leftFieldData, rightFieldData);
        duellFieldDataLong(random, context, rightFieldData, leftFieldData);

        DirectoryReader perSegment = DirectoryReader.open(writer, true);
        CompositeReaderContext composite = perSegment.getContext();
        List<AtomicReaderContext> leaves = composite.leaves();
        for (AtomicReaderContext atomicReaderContext : leaves) {
            duellFieldDataLong(random, atomicReaderContext, leftFieldData, rightFieldData);
        }
    }

}