Example usage for org.apache.lucene.document LatLonDocValuesField LatLonDocValuesField

List of usage examples for org.apache.lucene.document LatLonDocValuesField LatLonDocValuesField

Introduction

On this page you can find an example usage for org.apache.lucene.document LatLonDocValuesField LatLonDocValuesField.

Prototype

public LatLonDocValuesField(String name, double latitude, double longitude) 

Source Link

Document

Creates a new LatLonDocValuesField with the specified latitude and longitude

Usage

From source file:org.elasticsearch.index.mapper.LatLonPointFieldMapper.java

License:Apache License

@Override
protected void parse(ParseContext originalContext, GeoPoint point, String geoHash) throws IOException {
    // Geopoint fields, by default, will not be included in _all
    final ParseContext context = originalContext.setIncludeInAllDefault(false);

    if (ignoreMalformed.value() == false) {
        // Strict mode: reject out-of-range coordinates instead of normalizing them.
        if (point.lat() > 90.0 || point.lat() < -90.0) {
            throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name());
        }
        if (point.lon() > 180.0 || point.lon() < -180.0) {
            throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name());
        }
    } else {
        // Lenient mode: wrap the point back into valid lat/lon bounds.
        GeoUtils.normalizePoint(point);
    }
    if (fieldType().indexOptions() != IndexOptions.NONE) {
        context.doc().add(new LatLonPoint(fieldType().name(), point.lat(), point.lon()));
    }
    if (fieldType().stored()) {
        context.doc().add(new StoredField(fieldType().name(), point.toString()));
    }
    // Use the fieldType() accessor consistently with the checks above
    // (original read the bare `fieldType` field here).
    if (fieldType().hasDocValues()) {
        context.doc().add(new LatLonDocValuesField(fieldType().name(), point.lat(), point.lon()));
    }
    // if the mapping contains multifields then use the geohash string
    if (multiFields.iterator().hasNext()) {
        multiFields.parse(this, context.createExternalValueContext(point.geohash()));
    }
}

From source file:org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridAggregatorTests.java

License:Apache License

public void testFieldMissing() throws IOException {
    // Aggregating on a field that no indexed document contains must
    // produce an empty bucket list.
    testCase(new MatchAllDocsQuery(), "wrong_field", 1,
            indexWriter -> indexWriter
                    .addDocument(Collections.singleton(new LatLonDocValuesField(FIELD_NAME, 10D, 10D))),
            grid -> assertEquals(0, grid.getBuckets().size()));
}

From source file:org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridAggregatorTests.java

License:Apache License

public void testWithSeveralDocs() throws IOException {
    // Index random points, several per document, and verify that each geohash
    // bucket counts documents (not individual values) that produced the hash.
    final int precision = randomIntBetween(1, 12);
    final int numPoints = randomIntBetween(8, 128);
    final Map<String, Integer> expectedCountPerGeoHash = new HashMap<>();
    testCase(new MatchAllDocsQuery(), FIELD_NAME, precision, iw -> {
        final List<LatLonDocValuesField> pendingPoints = new ArrayList<>();
        final Set<String> hashesInCurrentDoc = new HashSet<>();
        for (int i = 0; i < numPoints; i++) {
            final double lat = (180d * randomDouble()) - 90d;
            final double lng = (360d * randomDouble()) - 180d;
            pendingPoints.add(new LatLonDocValuesField(FIELD_NAME, lat, lng));
            final String hash = stringEncode(lng, lat, precision);
            // Set.add returns true only the first time a hash appears in this
            // document, so each doc contributes at most 1 to a bucket's count.
            if (hashesInCurrentDoc.add(hash)) {
                expectedCountPerGeoHash.merge(hash, 1, Integer::sum);
            }
            if (usually()) {
                // Flush the accumulated values as one multi-valued document.
                iw.addDocument(pendingPoints);
                pendingPoints.clear();
                hashesInCurrentDoc.clear();
            }
        }
        if (!pendingPoints.isEmpty()) {
            iw.addDocument(pendingPoints);
        }
    }, geoHashGrid -> {
        assertEquals(expectedCountPerGeoHash.size(), geoHashGrid.getBuckets().size());
        for (GeoHashGrid.Bucket bucket : geoHashGrid.getBuckets()) {
            assertEquals((long) expectedCountPerGeoHash.get(bucket.getKeyAsString()), bucket.getDocCount());
        }
    });
}

From source file:org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregatorTests.java

License:Apache License

public void testRandom() throws Exception {
    // Index random multi-valued points while tracking the expected bounding
    // box by hand, then assert the geo_bounds aggregation reproduces it.
    double top = Double.NEGATIVE_INFINITY;
    double bottom = Double.POSITIVE_INFINITY;
    double posLeft = Double.POSITIVE_INFINITY;
    double posRight = Double.NEGATIVE_INFINITY;
    double negLeft = Double.POSITIVE_INFINITY;
    double negRight = Double.NEGATIVE_INFINITY;
    final int numDocs = randomIntBetween(50, 100);
    try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
        for (int docId = 0; docId < numDocs; docId++) {
            final Document doc = new Document();
            final int valuesPerDoc = randomIntBetween(1, 5);
            for (int v = 0; v < valuesPerDoc; v++) {
                final GeoPoint point = RandomGeoGenerator.randomPoint(random());
                final double lat = point.getLat();
                final double lon = point.getLon();
                top = Math.max(top, lat);
                bottom = Math.min(bottom, lat);
                // Positive and negative longitudes keep separate extents so
                // the aggregation can decide whether to wrap the dateline.
                if (lon >= 0) {
                    posLeft = Math.min(posLeft, lon);
                    posRight = Math.max(posRight, lon);
                } else {
                    negLeft = Math.min(negLeft, lon);
                    negRight = Math.max(negRight, lon);
                }
                doc.add(new LatLonDocValuesField("field", lat, lon));
            }
            w.addDocument(doc);
        }
        final GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder("my_agg").field("field")
                .wrapLongitude(false);

        final MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType();
        fieldType.setHasDocValues(true);
        fieldType.setName("field");
        try (IndexReader reader = w.getReader()) {
            final IndexSearcher searcher = new IndexSearcher(reader);
            final InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
            assertThat(bounds.top, closeTo(top, GEOHASH_TOLERANCE));
            assertThat(bounds.bottom, closeTo(bottom, GEOHASH_TOLERANCE));
            assertThat(bounds.posLeft, closeTo(posLeft, GEOHASH_TOLERANCE));
            assertThat(bounds.posRight, closeTo(posRight, GEOHASH_TOLERANCE));
            assertThat(bounds.negRight, closeTo(negRight, GEOHASH_TOLERANCE));
            assertThat(bounds.negLeft, closeTo(negLeft, GEOHASH_TOLERANCE));
        }
    }
}

From source file:org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregatorTests.java

License:Apache License

public void testUnmapped() throws Exception {
    // A geo_centroid over a field the aggregation cannot resolve to values
    // must report a null centroid — both when the mapped field is empty and
    // when the mapping names the populated field but the builder does not.
    try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
        GeoCentroidAggregationBuilder aggBuilder = new GeoCentroidAggregationBuilder("my_agg")
                .field("another_field");

        Document doc = new Document();
        doc.add(new LatLonDocValuesField("field", 10, 10));
        w.addDocument(doc);
        try (IndexReader reader = w.getReader()) {
            IndexSearcher searcher = new IndexSearcher(reader);

            // Case 1: mapping targets "another_field", which no document has.
            MappedFieldType emptyFieldType = new GeoPointFieldMapper.GeoPointFieldType();
            emptyFieldType.setHasDocValues(true);
            emptyFieldType.setName("another_field");
            InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, emptyFieldType);
            assertNull(result.centroid());

            // Case 2: mapping targets the populated "field", but the builder
            // still aggregates on "another_field".
            MappedFieldType populatedFieldType = new GeoPointFieldMapper.GeoPointFieldType();
            populatedFieldType.setHasDocValues(true);
            populatedFieldType.setName("field");
            result = search(searcher, new MatchAllDocsQuery(), aggBuilder, populatedFieldType);
            assertNull(result.centroid());
        }
    }
}

From source file:org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregatorTests.java

License:Apache License

public void testSingleValuedField() throws Exception {
    // Index one point per document and verify that the aggregated centroid
    // matches an incrementally computed running mean of those points.
    final int numDocs = scaledRandomIntBetween(64, 256);
    final int numUniqueGeoPoints = randomIntBetween(1, numDocs);
    try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
        final GeoPoint[] pool = new GeoPoint[numUniqueGeoPoints];
        for (int i = 0; i < pool.length; i++) {
            pool[i] = RandomGeoGenerator.randomPoint(random());
        }
        GeoPoint expectedCentroid = new GeoPoint(0, 0);
        for (int docId = 0; docId < numDocs; docId++) {
            final GeoPoint value = pool[docId % numUniqueGeoPoints];
            final Document document = new Document();
            document.add(new LatLonDocValuesField("field", value.getLat(), value.getLon()));
            w.addDocument(document);
            // Running mean: mean_n = mean_{n-1} + (x_n - mean_{n-1}) / n
            expectedCentroid = expectedCentroid.reset(
                    expectedCentroid.lat() + (value.lat() - expectedCentroid.lat()) / (docId + 1),
                    expectedCentroid.lon() + (value.lon() - expectedCentroid.lon()) / (docId + 1));
        }
        assertCentroid(w, expectedCentroid);
    }
}

From source file:org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregatorTests.java

License:Apache License

public void testMultiValuedField() throws Exception {
    // Index two points per document; the expected centroid is the running
    // mean of each document's own two-point midpoint.
    final int numDocs = scaledRandomIntBetween(64, 256);
    final int numUniqueGeoPoints = randomIntBetween(1, numDocs);
    try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {

        final GeoPoint[] pool = new GeoPoint[numUniqueGeoPoints];
        for (int i = 0; i < pool.length; i++) {
            pool[i] = RandomGeoGenerator.randomPoint(random());
        }
        GeoPoint expectedCentroid = new GeoPoint(0, 0);
        for (int docId = 0; docId < numDocs; docId++) {
            final GeoPoint first = pool[docId % numUniqueGeoPoints];
            final GeoPoint second = pool[(docId + 1) % numUniqueGeoPoints];
            final Document document = new Document();
            document.add(new LatLonDocValuesField("field", first.getLat(), first.getLon()));
            document.add(new LatLonDocValuesField("field", second.getLat(), second.getLon()));
            w.addDocument(document);
            // Per-document midpoint, then the same running-mean update as the
            // single-valued case.
            final double docLat = (first.lat() + second.lat()) / 2d;
            final double docLon = (first.lon() + second.lon()) / 2d;
            expectedCentroid = expectedCentroid.reset(
                    expectedCentroid.lat() + (docLat - expectedCentroid.lat()) / (docId + 1),
                    expectedCentroid.lon() + (docLon - expectedCentroid.lon()) / (docId + 1));
        }
        assertCentroid(w, expectedCentroid);
    }
}

From source file:org.hibernate.search.backend.lucene.types.codec.impl.GeoPointFieldCodec.java

License:LGPL

@Override
public void encode(LuceneDocumentBuilder documentBuilder, String absoluteFieldPath, GeoPoint value) {
    // A null value contributes no fields at all.
    if (value == null) {
        return;
    }

    final double latitude = value.getLatitude();
    final double longitude = value.getLongitude();

    if (Store.YES.equals(store)) {
        // Stored copies keep latitude and longitude retrievable as two
        // separate numeric fields.
        documentBuilder.addField(new StoredField(latitudeAbsoluteFieldPath, latitude));
        documentBuilder.addField(new StoredField(longitudeAbsoluteFieldPath, longitude));
    }
    if (Sortable.YES.equals(sortable)) {
        // Doc values enable sorting by distance on this field.
        documentBuilder.addField(new LatLonDocValuesField(absoluteFieldPath, latitude, longitude));
    }

    // The point itself is always indexed for spatial queries.
    documentBuilder.addField(new LatLonPoint(absoluteFieldPath, latitude, longitude));
}

From source file:perf.IndexAndSearchOpenStreetMaps.java

License:Apache License

private static void createIndex(boolean fast, boolean doForceMerge, boolean doDistanceSort)
        throws IOException, InterruptedException {
    // Bulk-indexes "id,lat,lon" CSV lines into NUM_PARTS separate indexes.
    // NUM_THREADS worker threads share a single line reader; access to it is
    // serialized under `lock`, and each thread pulls CHUNK lines at a time
    // before parsing/indexing them concurrently.

    CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder().onMalformedInput(CodingErrorAction.REPORT)
            .onUnmappableCharacter(CodingErrorAction.REPORT);

    int BUFFER_SIZE = 1 << 16; // 64K
    InputStream is;
    if (SMALL) {
        is = Files.newInputStream(Paths.get(DATA_LOCATION, "latlon.subsetPlusAllLondon.txt"));
    } else {
        is = Files.newInputStream(Paths.get(DATA_LOCATION, "latlon.txt"));
    }

    int NUM_THREADS;
    if (fast) {
        NUM_THREADS = 4;
    } else {
        NUM_THREADS = 1;
    }

    int CHUNK = 10000;

    long t0 = System.nanoTime();
    AtomicLong totalCount = new AtomicLong();

    // FIX: the original leaked both the stream and the reader. Closing here is
    // safe because every worker thread is joined before the loop completes.
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(is, decoder), BUFFER_SIZE)) {
        for (int part = 0; part < NUM_PARTS; part++) {
            Directory dir = FSDirectory.open(Paths.get(getName(part, doDistanceSort)));

            IndexWriterConfig iwc = new IndexWriterConfig(null);
            iwc.setCodec(getCodec(fast));
            iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
            if (fast) {
                // Throughput-oriented settings: unbounded merged segment size,
                // large in-memory indexing buffer.
                ((TieredMergePolicy) iwc.getMergePolicy()).setMaxMergedSegmentMB(Double.POSITIVE_INFINITY);
                iwc.setRAMBufferSizeMB(1024);
            } else {
                // Deterministic, serial merge behavior for repeatable runs.
                iwc.setMaxBufferedDocs(109630);
                iwc.setMergePolicy(new LogDocMergePolicy());
                iwc.setMergeScheduler(new SerialMergeScheduler());
            }
            iwc.setInfoStream(new PrintStreamInfoStream(System.out));
            IndexWriter w = new IndexWriter(dir, iwc);

            Thread[] threads = new Thread[NUM_THREADS];
            AtomicBoolean finished = new AtomicBoolean();
            Object lock = new Object();

            final int finalPart = part;

            for (int t = 0; t < NUM_THREADS; t++) {
                threads[t] = new Thread() {
                    @Override
                    public void run() {
                        String[] lines = new String[CHUNK];
                        while (finished.get() == false) {
                            try {
                                int count = CHUNK;
                                // Only line reading is serialized; parsing and
                                // indexing proceed concurrently per thread.
                                synchronized (lock) {
                                    for (int i = 0; i < CHUNK; i++) {
                                        String line = reader.readLine();
                                        if (line == null) {
                                            count = i;
                                            finished.set(true);
                                            break;
                                        }
                                        lines[i] = line;
                                    }
                                    // Cap the first part at 2B docs.
                                    if (finalPart == 0 && totalCount.get() + count >= 2000000000) {
                                        finished.set(true);
                                    }
                                }

                                for (int i = 0; i < count; i++) {
                                    String[] parts = lines[i].split(",");
                                    // parts[0] is the OSM id; it is not indexed.
                                    double lat = Double.parseDouble(parts[1]);
                                    double lon = Double.parseDouble(parts[2]);
                                    Document doc = new Document();
                                    if (useGeoPoint) {
                                        doc.add(new GeoPointField("point", lat, lon, Field.Store.NO));
                                    } else if (useGeo3D || useGeo3DLarge) {
                                        doc.add(new Geo3DPoint("point", lat, lon));
                                    } else {
                                        doc.add(new LatLonPoint("point", lat, lon));
                                        if (doDistanceSort) {
                                            doc.add(new LatLonDocValuesField("point", lat, lon));
                                        }
                                    }
                                    w.addDocument(doc);
                                    long x = totalCount.incrementAndGet();
                                    if (x % 1000000 == 0) {
                                        System.out.println(x + "...");
                                    }
                                }
                                // NOTE: removed a dead `if (false && ...)` early-break
                                // block and its now-unused chunk counter.
                            } catch (IOException ioe) {
                                throw new RuntimeException(ioe);
                            }
                        }
                    }
                };
                threads[t].start();
            }

            for (Thread thread : threads) {
                thread.join();
            }

            System.out.println("Part " + part + " is done: w.maxDoc()=" + w.maxDoc());
            w.commit();
            System.out.println("done commit");
            long t1 = System.nanoTime();
            System.out.println(((t1 - t0) / 1000000000.0) + " sec to index part " + part);
            if (doForceMerge) {
                w.forceMerge(1);
                long t2 = System.nanoTime();
                System.out.println(((t2 - t1) / 1000000000.0) + " sec to force merge part " + part);
            }
            w.close();
        }
    }
}