List of usage examples for org.apache.lucene.util.BytesRef.hashCode()
Method signature: @Override public int hashCode()
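Before the per-project examples, here is a minimal self-contained sketch of calling BytesRef.hashCode() directly. It relies only on the hashCode/equals contract: the hash is derived from the referenced bytes, so two BytesRef instances wrapping equal content hash identically within a run (the exact hash function differs between Lucene versions).

import org.apache.lucene.util.BytesRef;

public class BytesRefHashCodeDemo {
    public static void main(String[] args) {
        BytesRef a = new BytesRef("elasticsearch");
        BytesRef b = new BytesRef("elasticsearch");
        // The hash is computed over the referenced bytes, so equal content
        // yields equal hashes even for distinct BytesRef instances.
        System.out.println(a.bytesEquals(b));             // true
        System.out.println(a.hashCode() == b.hashCode()); // true
        // Many of the examples below precompute the hash once and pass it
        // alongside the key, e.g. add(key, key.hashCode()).
        int precomputed = a.hashCode();
        System.out.println(precomputed);
    }
}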
From source file:org.codelibs.elasticsearch.common.util.BytesRefHash.java
License:Apache License
/** Sugar for {@link #find(BytesRef, int) find(key, key.hashCode())} */
public long find(BytesRef key) {
    return find(key, key.hashCode());
}
From source file:org.codelibs.elasticsearch.common.util.BytesRefHash.java
License:Apache License
private long set(BytesRef key, int code, long id) {
    assert rehash(key.hashCode()) == code;
    assert size < maxSize;
    final long slot = slot(code, mask);
    for (long index = slot;; index = nextSlot(index, mask)) {
        final long curId = id(index);
        if (curId == -1) { // means unset
            id(index, id);
            append(id, key, code);
            ++size;
            return id;
        } else if (key.bytesEquals(get(curId, spare))) {
            return -1 - curId;
        }
    }
}
From source file:org.codelibs.elasticsearch.common.util.BytesRefHash.java
License:Apache License
/** Sugar to {@link #add(BytesRef, int) add(key, key.hashCode())}. */
public long add(BytesRef key) {
    return add(key, key.hashCode());
}
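Taken together, the find and add sugar methods above show the (key, key.hashCode()) calling convention of BytesRefHash. A minimal hedged usage sketch follows; the constructor arguments and BigArrays.NON_RECYCLING_INSTANCE are assumptions about Elasticsearch's internal API and may differ between versions.

BytesRefHash hash = new BytesRefHash(16, BigArrays.NON_RECYCLING_INSTANCE); // assumed constructor
try {
    BytesRef key = new BytesRef("foo");
    long id = hash.add(key);       // sugar for add(key, key.hashCode()); new key: returns an id >= 0
    long again = hash.add(key);    // key already present: returns -1 - id (see set() above)
    long found = hash.find(key);   // sugar for find(key, key.hashCode()); returns id, or -1 if absent
    assert again == -1 - id;
    assert found == id;
} finally {
    hash.release();                // frees the underlying big arrays, as in the testDuell example below
}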
From source file:org.elasticsearch.common.lucene.HashedBytesRef.java
License:Apache License
public HashedBytesRef(BytesRef bytes) {
    this(bytes, bytes.hashCode());
}
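HashedBytesRef pairs a BytesRef with a hash computed once up front, so repeated hash-based lookups do not recompute BytesRef.hashCode(). A brief sketch of that usage, assuming HashedBytesRef provides equals/hashCode over the wrapped bytes using the cached hash:

BytesRef term = new BytesRef("user_id");
HashedBytesRef key = new HashedBytesRef(term);          // term.hashCode() is computed exactly once here
Map<HashedBytesRef, Long> counts = new HashMap<HashedBytesRef, Long>();
Long previous = counts.get(key);                        // subsequent lookups reuse the cached hash
counts.put(key, previous == null ? 1L : previous + 1L);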
From source file:org.elasticsearch.common.lucene.search.XTermsFilter.java
License:Apache License
private XTermsFilter(FieldAndTermEnum iter, int length) {
    // TODO: maybe use oal.index.PrefixCodedTerms instead?
    // If number of terms is more than a few hundred it should be a win
    // TODO: we also pack terms in FieldCache/DocValues
    // ... maybe we can refactor to share that code
    // TODO: yet another option is to build the union of the terms in
    // an automaton and call intersect on the termsenum if the density is high
    int hash = 9;
    byte[] serializedTerms = new byte[0];
    this.offsets = new int[length + 1];
    int lastEndOffset = 0;
    int index = 0;
    ArrayList<TermsAndField> termsAndFields = new ArrayList<TermsAndField>();
    TermsAndField lastTermsAndField = null;
    BytesRef previousTerm = null;
    String previousField = null;
    BytesRef currentTerm;
    String currentField;
    while ((currentTerm = iter.next()) != null) {
        currentField = iter.field();
        if (currentField == null) {
            throw new IllegalArgumentException("Field must not be null");
        }
        if (previousField != null) {
            // deduplicate
            if (previousField.equals(currentField)) {
                if (previousTerm.bytesEquals(currentTerm)) {
                    continue;
                }
            } else {
                final int start = lastTermsAndField == null ? 0 : lastTermsAndField.end;
                lastTermsAndField = new TermsAndField(start, index, previousField);
                termsAndFields.add(lastTermsAndField);
            }
        }
        hash = PRIME * hash + currentField.hashCode();
        hash = PRIME * hash + currentTerm.hashCode();
        if (serializedTerms.length < lastEndOffset + currentTerm.length) {
            serializedTerms = ArrayUtil.grow(serializedTerms, lastEndOffset + currentTerm.length);
        }
        System.arraycopy(currentTerm.bytes, currentTerm.offset, serializedTerms, lastEndOffset, currentTerm.length);
        offsets[index] = lastEndOffset;
        lastEndOffset += currentTerm.length;
        index++;
        previousTerm = currentTerm;
        previousField = currentField;
    }
    offsets[index] = lastEndOffset;
    final int start = lastTermsAndField == null ? 0 : lastTermsAndField.end;
    lastTermsAndField = new TermsAndField(start, index, previousField);
    termsAndFields.add(lastTermsAndField);
    this.termsBytes = ArrayUtil.shrink(serializedTerms, lastEndOffset);
    this.termsAndFields = termsAndFields.toArray(new TermsAndField[termsAndFields.size()]);
    this.hashCode = hash;
}
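The constructor above folds each field name and term into a single filter-level hash with the classic hash = PRIME * hash + componentHash pattern, using BytesRef.hashCode() for the term bytes. A reduced standalone sketch of that combining step; the value of PRIME is not visible in the snippet, so 31 is an assumption here:

static final int PRIME = 31;  // assumed value; the actual constant is defined elsewhere in XTermsFilter

static int combineFieldAndTerm(int hash, String field, BytesRef term) {
    hash = PRIME * hash + field.hashCode();
    hash = PRIME * hash + term.hashCode();  // BytesRef.hashCode() over the term's bytes
    return hash;
}

// Usage, mirroring the loop above:
// int hash = 9;
// hash = combineFieldAndTerm(hash, "user", new BytesRef("kimchy"));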
From source file:org.elasticsearch.common.util.BytesRefHash.java
License:Apache License
private long set(BytesRef key, int code, long id) {
    assert rehash(key.hashCode()) == code;
    assert size < maxSize;
    final long slot = slot(code, mask);
    for (long index = slot;; index = nextSlot(index, mask)) {
        final long curId = id(index);
        if (curId == -1) { // means unset
            id(index, id);
            append(id, key, code);
            ++size;
            return id;
        } else if (UnsafeUtils.equals(key, get(curId, spare))) {
            return -1 - curId;
        }
    }
}
From source file:org.elasticsearch.common.util.BytesRefHashTests.java
License:Apache License
public void testDuell() {
    final int len = randomIntBetween(1, 100000);
    final BytesRef[] values = new BytesRef[len];
    for (int i = 0; i < values.length; ++i) {
        values[i] = new BytesRef(randomAsciiOfLength(5));
    }
    final ObjectLongMap<BytesRef> valueToId = new ObjectLongOpenHashMap<BytesRef>();
    final BytesRef[] idToValue = new BytesRef[values.length];
    final int iters = randomInt(1000000);
    for (int i = 0; i < iters; ++i) {
        final BytesRef value = randomFrom(values);
        if (valueToId.containsKey(value)) {
            assertEquals(-1 - valueToId.get(value), hash.add(value, value.hashCode()));
        } else {
            assertEquals(valueToId.size(), hash.add(value, value.hashCode()));
            idToValue[valueToId.size()] = value;
            valueToId.put(value, valueToId.size());
        }
    }
    assertEquals(valueToId.size(), hash.size());
    for (Iterator<ObjectLongCursor<BytesRef>> iterator = valueToId.iterator(); iterator.hasNext();) {
        final ObjectLongCursor<BytesRef> next = iterator.next();
        assertEquals(next.value, hash.find(next.key, next.key.hashCode()));
    }
    for (long i = 0; i < hash.capacity(); ++i) {
        final long id = hash.id(i);
        BytesRef spare = new BytesRef();
        if (id >= 0) {
            hash.get(id, spare);
            assertEquals(idToValue[(int) id], spare);
        }
    }
    hash.release();
}
From source file:org.elasticsearch.index.fielddata.DuelFieldDataTests.java
License:Apache License
private static void duelFieldDataBytes(Random random, AtomicReaderContext context, IndexFieldData<?> left,
        IndexFieldData<?> right, Preprocessor pre) throws Exception {
    AtomicFieldData<?> leftData = random.nextBoolean() ? left.load(context) : left.loadDirect(context);
    AtomicFieldData<?> rightData = random.nextBoolean() ? right.load(context) : right.loadDirect(context);
    assertThat(leftData.getNumDocs(), equalTo(rightData.getNumDocs()));
    int numDocs = leftData.getNumDocs();
    BytesValues leftBytesValues = leftData.getBytesValues(random.nextBoolean());
    BytesValues rightBytesValues = rightData.getBytesValues(random.nextBoolean());
    BytesRef leftSpare = new BytesRef();
    BytesRef rightSpare = new BytesRef();
    for (int i = 0; i < numDocs; i++) {
        int numValues = 0;
        assertThat((numValues = leftBytesValues.setDocument(i)), equalTo(rightBytesValues.setDocument(i)));
        BytesRef previous = null;
        for (int j = 0; j < numValues; j++) {
            rightSpare.copyBytes(rightBytesValues.nextValue());
            leftSpare.copyBytes(leftBytesValues.nextValue());
            assertThat(rightSpare.hashCode(), equalTo(rightBytesValues.currentValueHash()));
            assertThat(leftSpare.hashCode(), equalTo(leftBytesValues.currentValueHash()));
            if (previous != null && leftBytesValues.getOrder() == rightBytesValues.getOrder()) {
                // we can only compare the values if both sides return them in the same order
                assertThat(pre.compare(previous, rightSpare), lessThan(0));
            }
            previous = BytesRef.deepCopyOf(rightSpare);
            pre.toString(rightSpare);
            pre.toString(leftSpare);
            assertThat(pre.toString(leftSpare), equalTo(pre.toString(rightSpare)));
            if (leftSpare.equals(rightSpare)) {
                assertThat(leftBytesValues.currentValueHash(), equalTo(rightBytesValues.currentValueHash()));
            }
        }
    }
}
From source file:org.elasticsearch.index.fielddata.plain.ConcreteBytesRefAtomicFieldData.java
License:Apache License
@Override
public HashedBytesValues.WithOrdinals getHashedBytesValues() {
    if (hashes == null) {
        int[] hashes = new int[values.length];
        for (int i = 0; i < values.length; i++) {
            BytesRef value = values[i];
            hashes[i] = value == null ? 0 : value.hashCode();
        }
        this.hashes = hashes;
    }
    return ordinals.isMultiValued()
            ? new HashedBytesValues.Multi(values, hashes, ordinals.ordinals())
            : new HashedBytesValues.Single(values, hashes, ordinals.ordinals());
}
From source file:org.elasticsearch.index.fielddata.plain.PagedBytesAtomicFieldData.java
License:Apache License
private final IntArray getHashes() {
    if (hashes == null) {
        long numberOfValues = termOrdToBytesOffset.size();
        IntArray hashes = BigArrays.newIntArray(numberOfValues);
        BytesRef scratch = new BytesRef();
        for (long i = 0; i < numberOfValues; i++) {
            bytes.fill(scratch, termOrdToBytesOffset.get(i));
            hashes.set(i, scratch.hashCode());
        }
        this.hashes = hashes;
    }
    return hashes;
}
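In both field-data examples (ConcreteBytesRefAtomicFieldData and PagedBytesAtomicFieldData), the hash array is built lazily on the first request and then cached, so BytesRef.hashCode() is invoked at most once per stored term no matter how often hashed values are requested afterwards.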