Example usage for org.apache.lucene.util BytesRef deepCopyOf

Introduction

This page collects example usage of org.apache.lucene.util.BytesRef.deepCopyOf from open-source projects.

Prototype

public static BytesRef deepCopyOf(BytesRef other) 

Documentation

Creates a new BytesRef that points to a copy of the bytes from other.

The returned BytesRef will have a length of other.length and an offset of zero.
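
For orientation, here is a minimal, self-contained sketch (not taken from the projects listed below; the class name DeepCopyExample is invented for illustration). It shows what the deep copy buys you: the result owns its own byte array with an offset of zero, so later changes to the original buffer do not show through.

import java.nio.charset.StandardCharsets;

import org.apache.lucene.util.BytesRef;

public class DeepCopyExample {
    public static void main(String[] args) {
        byte[] buffer = "hello world".getBytes(StandardCharsets.UTF_8);

        // A BytesRef is only a view: byte[] + offset + length. This one points at "world".
        BytesRef view = new BytesRef(buffer, 6, 5);

        // deepCopyOf allocates a fresh byte[] of view.length bytes and copies into it,
        // so the result has offset 0 and no longer shares storage with buffer.
        BytesRef copy = BytesRef.deepCopyOf(view);

        buffer[6] = 'W'; // mutate the original backing array

        System.out.println(view.utf8ToString()); // "World" - the view sees the mutation
        System.out.println(copy.utf8ToString()); // "world" - the deep copy does not
    }
}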

Usage

From source file:org.easynet.resource.queryparser.QueryParserBase.java

License:Apache License

protected BytesRef analyzeMultitermTerm(String field, String part, Analyzer analyzerIn) {
    if (analyzerIn == null)
        analyzerIn = getAnalyzer();

    TokenStream source = null;
    try {
        source = analyzerIn.tokenStream(field, part);
        source.reset();

        TermToBytesRefAttribute termAtt = source.getAttribute(TermToBytesRefAttribute.class);
        BytesRef bytes = termAtt.getBytesRef();

        if (!source.incrementToken())
            throw new IllegalArgumentException("analyzer returned no terms for multiTerm term: " + part);
        termAtt.fillBytesRef();
        if (source.incrementToken())
            throw new IllegalArgumentException("analyzer returned too many terms for multiTerm term: " + part);
        source.end();
        // the token stream's attribute reuses a single BytesRef across tokens, so return a private copy
        return BytesRef.deepCopyOf(bytes);
    } catch (IOException e) {
        throw new RuntimeException("Error analyzing multiTerm term: " + part, e);
    } finally {
        IOUtils.closeWhileHandlingException(source);
    }
}

From source file:org.elasticsearch.action.mlt.TransportMoreLikeThisAction.java

License:Apache License

private Object convertField(Field field) {
    if (field.stringValue() != null) {
        return field.stringValue();
    } else if (field.binaryValue() != null) {
        // deepCopyOf returns a BytesRef with offset 0, so .bytes holds exactly the field's binary value
        return BytesRef.deepCopyOf(field.binaryValue()).bytes;
    } else if (field.numericValue() != null) {
        return field.numericValue();
    } else {
        throw new ElasticsearchIllegalStateException(
                "Field should have either a string, numeric or binary value");
    }
}

From source file:org.elasticsearch.common.bytes.AbstractBytesReferenceTestCase.java

License:Apache License

public void testIterator() throws IOException {
    int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8));
    BytesReference pbr = newBytesReference(length);
    BytesRefIterator iterator = pbr.iterator();
    BytesRef ref;
    BytesRefBuilder builder = new BytesRefBuilder();
    while ((ref = iterator.next()) != null) {
        builder.append(ref);
    }
    assertArrayEquals(BytesReference.toBytes(pbr), BytesRef.deepCopyOf(builder.toBytesRef()).bytes);
}

From source file:org.elasticsearch.common.bytes.AbstractBytesReferenceTestCase.java

License:Apache License

public void testSliceIterator() throws IOException {
    int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8));
    BytesReference pbr = newBytesReference(length);
    int sliceOffset = randomIntBetween(0, pbr.length());
    int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset);
    BytesReference slice = pbr.slice(sliceOffset, sliceLength);
    BytesRefIterator iterator = slice.iterator();
    BytesRef ref = null;
    BytesRefBuilder builder = new BytesRefBuilder();
    while ((ref = iterator.next()) != null) {
        builder.append(ref);
    }
    assertArrayEquals(BytesReference.toBytes(slice), BytesRef.deepCopyOf(builder.toBytesRef()).bytes);
}

From source file:org.elasticsearch.common.bytes.AbstractBytesReferenceTestCase.java

License:Apache License

public void testIteratorRandom() throws IOException {
    int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8));
    BytesReference pbr = newBytesReference(length);
    if (randomBoolean()) {
        int sliceOffset = randomIntBetween(0, pbr.length());
        int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset);
        pbr = pbr.slice(sliceOffset, sliceLength);
    }

    if (randomBoolean()) {
        pbr = new BytesArray(pbr.toBytesRef());
    }
    BytesRefIterator iterator = pbr.iterator();
    BytesRef ref = null;
    BytesRefBuilder builder = new BytesRefBuilder();
    while ((ref = iterator.next()) != null) {
        builder.append(ref);
    }
    assertArrayEquals(BytesReference.toBytes(pbr), BytesRef.deepCopyOf(builder.toBytesRef()).bytes);
}

From source file:org.elasticsearch.common.bytes.AbstractBytesReferenceTestCase.java

License:Apache License

public void testEquals() throws IOException {
    BytesReference bytesReference = newBytesReference(
            randomIntBetween(100, PAGE_SIZE * randomIntBetween(2, 5)));
    BytesReference copy = bytesReference.slice(0, bytesReference.length());

    // get refs & compare
    assertEquals(copy, bytesReference);
    int sliceFrom = randomIntBetween(0, bytesReference.length());
    int sliceLength = randomIntBetween(0, bytesReference.length() - sliceFrom);
    assertEquals(copy.slice(sliceFrom, sliceLength), bytesReference.slice(sliceFrom, sliceLength));

    BytesRef bytesRef = BytesRef.deepCopyOf(copy.toBytesRef());
    assertEquals(new BytesArray(bytesRef), copy);

    int offsetToFlip = randomIntBetween(0, bytesRef.length - 1);
    int value = ~Byte.toUnsignedInt(bytesRef.bytes[bytesRef.offset + offsetToFlip]);
    bytesRef.bytes[bytesRef.offset + offsetToFlip] = (byte) value;
    assertNotEquals(new BytesArray(bytesRef), copy);
}

From source file:org.elasticsearch.common.bytes.BytesArray.java

License:Apache License

public BytesArray(BytesRef bytesRef, boolean deepCopy) {
    if (deepCopy) {
        BytesRef copy = BytesRef.deepCopyOf(bytesRef);
        bytes = copy.bytes;
        offset = copy.offset;
        length = copy.length;
    } else {
        bytes = bytesRef.bytes;
        offset = bytesRef.offset;
        length = bytesRef.length;
    }
}
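
A short, hypothetical usage of this constructor (assuming the BytesArray class shown above), illustrating the difference the deepCopy flag makes:

BytesRef ref = new BytesRef("payload");         // UTF-8 bytes of "payload"
BytesArray shared = new BytesArray(ref, false); // wraps the same byte[] as ref
BytesArray owned = new BytesArray(ref, true);   // BytesRef.deepCopyOf gives it a private byte[]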

From source file:org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery.java

License:Apache License

private void getPrefixTerms(ObjectOpenHashSet<Term> terms, final Term prefix, final IndexReader reader)
        throws IOException {
    // SlowCompositeReaderWrapper could be used... but this would merge all terms from each segment into one terms
    // instance, which is very expensive. Therefore I think it is better to iterate over each leaf individually.
    TermsEnum termsEnum = null;
    List<AtomicReaderContext> leaves = reader.leaves();
    for (AtomicReaderContext leaf : leaves) {
        Terms _terms = leaf.reader().terms(field);
        if (_terms == null) {
            continue;
        }

        termsEnum = _terms.iterator(termsEnum);
        TermsEnum.SeekStatus seekStatus = termsEnum.seekCeil(prefix.bytes());
        if (TermsEnum.SeekStatus.END == seekStatus) {
            continue;
        }

        for (BytesRef term = termsEnum.term(); term != null; term = termsEnum.next()) {
            if (!StringHelper.startsWith(term, prefix.bytes())) {
                break;
            }

            // the TermsEnum reuses the BytesRef it returns, so store a private copy
            terms.add(new Term(field, BytesRef.deepCopyOf(term)));
            if (terms.size() >= maxExpansions) {
                return;
            }
        }
    }
}

From source file:org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery.java

License:Apache License

private void getPrefixTerms(ObjectHashSet<Term> terms, final Term prefix, final IndexReader reader)
        throws IOException {
    // SlowCompositeReaderWrapper could be used... but this would merge all terms from each segment into one terms
    // instance, which is very expensive. Therefore I think it is better to iterate over each leaf individually.
    List<LeafReaderContext> leaves = reader.leaves();
    for (LeafReaderContext leaf : leaves) {
        Terms _terms = leaf.reader().terms(field);
        if (_terms == null) {
            continue;
        }

        TermsEnum termsEnum = _terms.iterator();
        TermsEnum.SeekStatus seekStatus = termsEnum.seekCeil(prefix.bytes());
        if (TermsEnum.SeekStatus.END == seekStatus) {
            continue;
        }

        for (BytesRef term = termsEnum.term(); term != null; term = termsEnum.next()) {
            if (!StringHelper.startsWith(term, prefix.bytes())) {
                break;
            }

            // the TermsEnum reuses the BytesRef it returns, so store a private copy
            terms.add(new Term(field, BytesRef.deepCopyOf(term)));
            if (terms.size() >= maxExpansions) {
                return;
            }
        }
    }
}

From source file:org.elasticsearch.index.fielddata.AbstractFieldDataImplTests.java

License:Apache License

private HashedBytesRef convert(BytesValues values, int doc) {
    if (values.setDocument(doc) > 0) {
        return new HashedBytesRef(BytesRef.deepCopyOf(values.nextValue()), values.currentValueHash());
    } else {
        return new HashedBytesRef(new BytesRef());
    }
}