List of usage examples for the org.apache.lucene.util.BytesRef constructor
public BytesRef(CharSequence text)
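Before the project examples, here is a minimal self-contained sketch of what this constructor does; the BytesRefExample class name is invented for illustration, while the BytesRef calls themselves are standard Lucene API. The constructor copies the UTF-8 encoding of the given CharSequence into a freshly allocated byte[], exposed through the public bytes/offset/length fields.

import org.apache.lucene.util.BytesRef;

public class BytesRefExample {
    public static void main(String[] args) {
        // Copies the UTF-8 bytes of the text into a new array.
        BytesRef ref = new BytesRef("Müller");

        // length counts UTF-8 bytes, not characters ("ü" needs two bytes).
        System.out.println(ref.length);                                      // 7

        // Round-trip back to a java.lang.String.
        System.out.println(ref.utf8ToString());                              // Müller

        // Equality and ordering are defined on the byte content,
        // which is why the tests below can assert is(new BytesRef("...")).
        System.out.println(ref.equals(new BytesRef("Müller")));              // true
        System.out.println(new BytesRef("a").compareTo(new BytesRef("b")));  // negative
    }
}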
From source file:io.crate.operation.operator.input.BytesRefInput.java
License:Apache License
public BytesRefInput(String value) { this(new BytesRef(value)); }
From source file:io.crate.operation.projectors.GroupingProjectorBenchmark.java
License:Apache License
@Test
public void testGroupByMinBytesRef() throws Exception {
    Functions functions = new ModulesBuilder().add(new AggregationImplModule())
            .createInjector().getInstance(Functions.class);
    InputCollectExpression keyInput = new InputCollectExpression(0);
    List<Input<?>> keyInputs = Arrays.<Input<?>>asList(keyInput);
    CollectExpression[] collectExpressions = new CollectExpression[] { keyInput };

    FunctionIdent minStringFuncIdent = new FunctionIdent(MinimumAggregation.NAME,
            Arrays.<DataType>asList(DataTypes.STRING));
    FunctionInfo minStringFuncInfo = new FunctionInfo(minStringFuncIdent, DataTypes.STRING,
            FunctionInfo.Type.AGGREGATE);
    AggregationFunction minAgg = (AggregationFunction) functions.get(minStringFuncIdent);
    Aggregation aggregation = Aggregation.finalAggregation(minStringFuncInfo,
            Arrays.<Symbol>asList(new InputColumn(0)), Aggregation.Step.ITER);

    AggregationContext aggregationContext = new AggregationContext(minAgg, aggregation);
    aggregationContext.addInput(keyInput);
    AggregationContext[] aggregations = new AggregationContext[] { aggregationContext };

    GroupingProjector groupingProjector = new GroupingProjector(Arrays.<DataType>asList(DataTypes.STRING),
            keyInputs, collectExpressions, aggregations, RAM_ACCOUNTING_CONTEXT);

    RowReceiver finalReceiver = new RowCountRowReceiver();
    groupingProjector.downstream(finalReceiver);
    groupingProjector.prepare();

    List<BytesRef> keys = new ArrayList<>(Locale.getISOCountries().length);
    for (String s : Locale.getISOCountries()) {
        keys.add(new BytesRef(s));
    }

    SpareRow row = new SpareRow();
    for (int i = 0; i < 20_000_000; i++) {
        row.value = keys.get(i % keys.size());
        groupingProjector.setNextRow(row);
    }
    groupingProjector.finish(RepeatHandle.UNSUPPORTED);
}
From source file:io.crate.operation.projectors.IndexWriterCountBatchIteratorTest.java
License:Apache License
@Test
public void testIndexWriterIterator() throws Exception {
    execute("create table bulk_import (id int primary key) with (number_of_replicas=0)");
    ensureGreen();

    Supplier<BatchIterator> sourceSupplier = () -> RowsBatchIterator.newInstance(
            RowGenerator.fromSingleColValues(
                    () -> IntStream.range(0, 10).mapToObj(i -> new BytesRef("{\"id\": " + i + "}")).iterator()),
            1);
    Supplier<String> indexNameResolver = IndexNameResolver.forTable(new TableIdent(null, "bulk_import"));
    Input<?> sourceInput = new InputCollectExpression(0);
    List<CollectExpression<Row, ?>> collectExpressions = Collections
            .singletonList((InputCollectExpression) sourceInput);
    List<Object[]> expectedResult = Collections.singletonList(new Object[] { 10L });

    BatchIteratorTester tester = new BatchIteratorTester(() -> {
        RowShardResolver rowShardResolver = getRowShardResolver();
        Supplier<ShardUpsertRequest.Item> updateItemSupplier = () -> new ShardUpsertRequest.Item(
                rowShardResolver.id(), null, new Object[] { sourceInput.value() }, null);
        BulkShardProcessor bulkShardProcessor = getBulkShardProcessor();
        return IndexWriterCountBatchIterator.newIndexInstance(sourceSupplier.get(), indexNameResolver,
                collectExpressions, rowShardResolver, bulkShardProcessor, updateItemSupplier);
    });
    tester.verifyResultAndEdgeCaseBehaviour(expectedResult);
}
From source file:io.crate.operation.projectors.IndexWriterProjectorTest.java
License:Apache License
@Test
public void testIndexWriter() throws Throwable {
    execute("create table bulk_import (id int primary key, name string) with (number_of_replicas=0)");
    ensureGreen();

    CollectingRowReceiver collectingRowReceiver = new CollectingRowReceiver();
    InputCollectExpression sourceInput = new InputCollectExpression(1);
    InputColumn sourceInputColumn = new InputColumn(1, StringType.INSTANCE);
    CollectExpression[] collectExpressions = new CollectExpression[] { sourceInput };

    IndexWriterProjector writerProjector = new IndexWriterProjector(
            internalCluster().getInstance(ClusterService.class), ImmutableSettings.EMPTY,
            internalCluster().getInstance(TransportActionProvider.class),
            internalCluster().getInstance(BulkRetryCoordinatorPool.class),
            new TableIdent(null, "bulk_import"), null,
            new Reference(new ReferenceInfo(new ReferenceIdent(bulkImportIdent, DocSysColumns.RAW),
                    RowGranularity.DOC, DataTypes.STRING)),
            Arrays.asList(ID_IDENT), Arrays.<Symbol>asList(new InputColumn(0)), Arrays.<Input<?>>asList(),
            null, null, sourceInput, sourceInputColumn, collectExpressions, 20, null, null, false, false,
            UUID.randomUUID());
    writerProjector.downstream(collectingRowReceiver);

    final RowDownstream rowDownstream = RowMergers.passThroughRowMerger(writerProjector);

    final RowReceiver receiver1 = rowDownstream.newRowReceiver();
    receiver1.prepare(mock(ExecutionState.class));
    Thread t1 = new Thread(new Runnable() {
        @Override
        public void run() {
            for (int i = 0; i < 100; i++) {
                receiver1.setNextRow(new RowN(
                        new Object[] { i, new BytesRef("{\"id\": " + i + ", \"name\": \"Arthur\"}") }));
            }
        }
    });

    final RowReceiver receiver2 = rowDownstream.newRowReceiver();
    receiver2.prepare(mock(ExecutionState.class));
    Thread t2 = new Thread(new Runnable() {
        @Override
        public void run() {
            for (int i = 100; i < 200; i++) {
                receiver2.setNextRow(new RowN(
                        new Object[] { i, new BytesRef("{\"id\": " + i + ", \"name\": \"Trillian\"}") }));
            }
        }
    });

    t1.start();
    t2.start();
    t1.join();
    t2.join();

    receiver1.finish();
    receiver2.finish();

    Bucket objects = collectingRowReceiver.result();
    assertThat(objects, contains(isRow(200L)));

    execute("refresh table bulk_import");
    execute("select count(*) from bulk_import");
    assertThat(response.rowCount(), is(1L));
    assertThat((Long) response.rows()[0][0], is(200L));
}
From source file:io.crate.operation.projectors.IndexWriterProjectorUnitTest.java
License:Apache License
@Test
public void testNullPKValue() throws Throwable {
    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage("A primary key value must not be NULL");

    CollectingRowReceiver rowReceiver = new CollectingRowReceiver();
    InputCollectExpression sourceInput = new InputCollectExpression(0);
    InputColumn sourceInputColumn = new InputColumn(0);
    CollectExpression[] collectExpressions = new CollectExpression[] { sourceInput };

    final IndexWriterProjector indexWriter = new IndexWriterProjector(clusterService, ImmutableSettings.EMPTY,
            mock(TransportActionProvider.class),
            mock(BulkRetryCoordinatorPool.class, Answers.RETURNS_DEEP_STUBS.get()),
            new TableIdent(null, "bulk_import"), null, rawSourceReference, ImmutableList.of(ID_IDENT),
            Arrays.<Symbol>asList(new InputColumn(1)), ImmutableList.<Input<?>>of(), null, null, sourceInput,
            sourceInputColumn, collectExpressions, 20, null, null, false, false, UUID.randomUUID());

    indexWriter.downstream(rowReceiver);
    indexWriter.prepare(mock(ExecutionState.class));
    indexWriter.setNextRow(new RowN(new Object[] { new BytesRef("{\"y\": \"x\"}"), null }));
    indexWriter.finish();
}
From source file:io.crate.operation.projectors.ProjectionToProjectorVisitorTest.java
License:Apache License
@Test
public void testGroupProjector() throws Exception {
    //        in(0)  in(1)     in(0),       in(2)
    // select race,  avg(age), count(race), gender ... group by race, gender
    GroupProjection projection = new GroupProjection();
    projection.keys(
            Arrays.<Symbol>asList(new InputColumn(0, DataTypes.STRING), new InputColumn(2, DataTypes.STRING)));
    projection.values(Arrays.asList(
            Aggregation.finalAggregation(avgInfo, Arrays.<Symbol>asList(new InputColumn(1)),
                    Aggregation.Step.ITER),
            Aggregation.finalAggregation(countInfo, Arrays.<Symbol>asList(new InputColumn(0)),
                    Aggregation.Step.ITER)));
    Projector projector = visitor.create(projection, RAM_ACCOUNTING_CONTEXT, UUID.randomUUID());

    // use a topN projection in order to get sorted outputs
    TopNProjection topNProjection = new TopNProjection(10, 0,
            ImmutableList.<Symbol>of(new InputColumn(2, DataTypes.DOUBLE)),
            new boolean[] { false }, new Boolean[] { null });
    topNProjection.outputs(
            Arrays.<Symbol>asList(new InputColumn(0, DataTypes.STRING), new InputColumn(1, DataTypes.STRING),
                    new InputColumn(2, DataTypes.DOUBLE), new InputColumn(3, DataTypes.LONG)));
    Projector topNProjector = visitor.create(topNProjection, RAM_ACCOUNTING_CONTEXT, UUID.randomUUID());
    projector.downstream(topNProjector);

    CollectingRowReceiver collector = new CollectingRowReceiver();
    topNProjector.downstream(collector);

    ExecutionState state = mock(ExecutionState.class);
    collector.prepare(state);
    topNProjector.prepare(state);
    projector.prepare(state);

    assertThat(projector, instanceOf(GroupingProjector.class));

    BytesRef human = new BytesRef("human");
    BytesRef vogon = new BytesRef("vogon");
    BytesRef male = new BytesRef("male");
    BytesRef female = new BytesRef("female");

    projector.setNextRow(spare(human, 34, male));
    projector.setNextRow(spare(human, 22, female));
    projector.setNextRow(spare(vogon, 40, male));
    projector.setNextRow(spare(vogon, 48, male));
    projector.setNextRow(spare(human, 34, male));
    projector.finish();

    Bucket rows = collector.result();
    assertThat(rows, contains(
            isRow(human, female, 22.0, 1L),
            isRow(human, male, 34.0, 2L),
            isRow(vogon, male, 44.0, 2L)));
}
From source file:io.crate.operation.projectors.WriterProjectorTest.java
License:Apache License
@Test
public void testWriteRawToFile() throws Exception {
    String fileAbsolutePath = folder.newFile("out.json").getAbsolutePath();
    String uri = Paths.get(fileAbsolutePath).toUri().toString();
    Settings settings = ImmutableSettings.EMPTY;
    WriterProjector projector = new WriterProjector(executorService, uri, settings, null,
            ImmutableSet.<CollectExpression<Row, ?>>of(), new HashMap<ColumnIdent, Object>());

    CollectingRowReceiver rowReceiver = new CollectingRowReceiver();
    projector.downstream(rowReceiver);
    projector.prepare(mock(ExecutionState.class));

    for (int i = 0; i < 5; i++) {
        projector.setNextRow(new Row1(new BytesRef(String.format(Locale.ENGLISH, "input line %02d", i))));
    }
    projector.finish();

    Bucket rows = rowReceiver.result();
    assertThat(rows, contains(isRow(5L)));
    assertEquals(
            "input line 00\n" + "input line 01\n" + "input line 02\n" + "input line 03\n" + "input line 04\n",
            TestingHelpers.readFile(fileAbsolutePath));
}
From source file:io.crate.operation.reference.doc.blob.BlobDigestExpression.java
License:Apache License
@Override public BytesRef value() { return new BytesRef(blob.getName()); }
From source file:io.crate.operation.reference.doc.IpColumnReferenceTest.java
License:Apache License
@Override
protected void insertValues(IndexWriter writer) throws Exception {
    for (int i = 0; i < 10; i++) {
        Document doc = new Document();
        InetAddress address = InetAddresses.forString("192.168.0." + i);
        doc.add(new SortedSetDocValuesField(column, new BytesRef(InetAddressPoint.encode(address))));
        if (i == 0) {
            address = InetAddresses.forString("192.168.0.1");
            doc.add(new SortedSetDocValuesField(column_array, new BytesRef(InetAddressPoint.encode(address))));
            address = InetAddresses.forString("192.168.0.2");
            doc.add(new SortedSetDocValuesField(column_array, new BytesRef(InetAddressPoint.encode(address))));
        }
        writer.addDocument(doc);
    }
}
From source file:io.crate.operation.reference.doc.IpColumnReferenceTest.java
License:Apache License
@Test
public void testIpExpression() throws Exception {
    IpColumnReference columnReference = new IpColumnReference(column);
    columnReference.startCollect(ctx);
    columnReference.setNextReader(readerContext);

    IndexSearcher searcher = new IndexSearcher(readerContext.reader());
    TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 20);

    int i = 0;
    for (ScoreDoc doc : topDocs.scoreDocs) {
        columnReference.setNextDocId(doc.doc);
        assertThat(columnReference.value(), is(new BytesRef("192.168.0." + i)));
        i++;
    }
}
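A closing aside on the IpColumnReferenceTest pair above: insertValues stores the binary form produced by InetAddressPoint.encode (which goes through the BytesRef(byte[]) constructor), while the assertion in testIpExpression compares against a textual BytesRef built from a String. Below is a minimal sketch of the two flavours, assuming Lucene's InetAddressPoint API; the IpBytesRefSketch class name is invented for illustration.

import java.net.InetAddress;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.util.BytesRef;

public class IpBytesRefSketch {
    public static void main(String[] args) throws Exception {
        InetAddress address = InetAddress.getByName("192.168.0.3");

        // byte[] constructor: wraps the encoded form used for the doc values field.
        BytesRef encoded = new BytesRef(InetAddressPoint.encode(address));

        // CharSequence constructor: wraps the UTF-8 text form used in the assertion.
        BytesRef textual = new BytesRef(InetAddressPoint.decode(encoded.bytes).getHostAddress());

        System.out.println(textual.utf8ToString()); // 192.168.0.3
    }
}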