Example usage for org.apache.lucene.analysis.payloads PayloadHelper encodeInt

List of usage examples for org.apache.lucene.analysis.payloads PayloadHelper encodeInt

Introduction

On this page you can find example usage of org.apache.lucene.analysis.payloads PayloadHelper encodeInt.

Prototype

public static byte[] encodeInt(int payload) 

Source Link

Usage

From source file:at.ac.univie.mminf.luceneSKOS.analysis.AbstractMeSHFilter.java

License:Apache License

/**
 * Replaces the current term (attributes) with term (attributes) from the
 * stack/*  w ww. j a v  a  2 s.c o  m*/
 * 
 * @throws IOException
 */
protected void processTermOnStack() throws IOException {
    ExpandedTerm expandedTerm = termStack.pop();

    String term = expandedTerm.getTerm();

    SKOSType termType = expandedTerm.getTermType();

    String sTerm = "";

    try {
        sTerm = analyze(analyzer, term, new CharsRef()).toString();
    } catch (IllegalArgumentException e) {
        // skip this term
        return;
    }

    /*
     * copies the values of all attribute implementations from this state into
     * the implementations of the target stream
     */
    restoreState(current);

    /*
     * Adds the expanded term to the term buffer
     */
    termAtt.setEmpty().append(sTerm);

    /*
     * set position increment to zero to put multiple terms into the same
     * position
     */
    posIncrAtt.setPositionIncrement(0);

    /*
     * sets the type of the expanded term (pref, alt, broader, narrower, etc.)
     */
    skosAtt.setSkosType(termType);

    /*
     * converts the SKOS Attribute to a payload, which is propagated to the
     * index
     */
    byte[] bytes = PayloadHelper.encodeInt(skosAtt.getSkosType().ordinal());
    payloadAtt.setPayload(new BytesRef(bytes));
}

From source file:at.ac.univie.mminf.luceneSKOS.analysis.SNOMEDFilter.java

License:Apache License

/**
 * Replaces the current term (attributes) with term (attributes) from the
 * stack/*from  w w  w . jav  a 2s.  c  om*/
 * 
 * @throws IOException
 */
protected void processTermOnStack() throws IOException {
    ExpandedTerm expandedTerm = termStack.pop();

    String term = expandedTerm.getTerm();

    SKOSType termType = expandedTerm.getTermType();

    String sTerm = "";

    try {
        sTerm = analyze(analyzer, term, new CharsRef()).toString();
    } catch (IllegalArgumentException e) {
        // skip this term
        return;
    }

    /*
     * copies the values of all attribute implementations from this state
     * into the implementations of the target stream
     */
    restoreState(current);

    /*
     * Adds the expanded term to the term buffer
     */
    termAtt.setEmpty().append(sTerm);

    /*
     * set position increment to zero to put multiple terms into the same
     * position
     */
    posIncrAtt.setPositionIncrement(0);

    /*
     * sets the type of the expanded term (pref, alt, broader, narrower,
     * etc.)
     */
    skosAtt.setSkosType(termType);

    /*
     * converts the SKOS Attribute to a payload, which is propagated to the
     * index
     */
    byte[] bytes = PayloadHelper.encodeInt(skosAtt.getSkosType().ordinal());
    payloadAtt.setPayload(new BytesRef(bytes));
}

From source file:at.ac.univie.mminf.luceneSKOS.index.SKOSTypePayload.java

License:Apache License

/**
 * Creates a payload carrying the given SKOS type, encoded as the 4-byte
 * representation of the type's ordinal.
 *
 * @param skosAtt attribute holding the SKOS type to encode
 */
public SKOSTypePayload(SKOSTypeAttribute skosAtt) {
    super();
    // Encode the SKOS type's ordinal into payload bytes.
    super.setData(PayloadHelper.encodeInt(skosAtt.getSkosType().ordinal()));
}

From source file:edu.cmu.lti.oaqa.annographix.solr.AnnotEncoderVer3.java

License:Apache License

/**
 * Parses a payload token of the form
 * {@code <wordStart><SEP><wordEnd><SEP><annotId><SEP><parentId>} (separator:
 * {@code UtilConst.PAYLOAD_ID_SEP_CHAR}) and encodes the four integers as a
 * 16-byte payload, 4 bytes each via {@code PayloadHelper.encodeInt}.
 *
 * @param buffer character buffer holding the token
 * @param offset start of the token within the buffer
 * @param length length of the token
 * @return concatenated encodings of the four parsed integers
 * @throws RuntimeException if the token does not contain exactly three
 *         separator characters
 */
@Override
public BytesRef encode(char[] buffer, int offset, int length) {
    int sep1pos = -1, sep2pos = -1, sep3pos = -1, sepQty = 0;

    for (int i = 0; i < length; ++i) {
        char c = buffer[offset + i];
        if (c == UtilConst.PAYLOAD_ID_SEP_CHAR) {
            ++sepQty;
            if (1 == sepQty)
                sep1pos = i;
            else if (2 == sepQty)
                sep2pos = i;
            else if (3 == sepQty)
                sep3pos = i;
            else {
                String errData = new String(buffer, offset, length);
                throw new RuntimeException("Cannot parse payload input: " + errData);
            }
        }
    }

    /*
     * Fail fast on malformed input: with fewer than three separators the
     * position arithmetic below would use a stale -1 and either throw an
     * obscure parse error or silently mis-parse the token.
     */
    if (sepQty != 3) {
        String errData = new String(buffer, offset, length);
        throw new RuntimeException("Cannot parse payload input: " + errData);
    }

    int wordStartPos = ArrayUtil.parseInt(buffer, offset, sep1pos);
    int wordEndPos = ArrayUtil.parseInt(buffer, offset + sep1pos + 1, sep2pos - sep1pos - 1);
    int annotId = ArrayUtil.parseInt(buffer, offset + sep2pos + 1, sep3pos - sep2pos - 1);
    int parentId = ArrayUtil.parseInt(buffer, offset + sep3pos + 1, length - sep3pos - 1);

    // Concatenate the four 4-byte encodings into a single payload.
    BytesRef result = new BytesRef(PayloadHelper.encodeInt(wordStartPos));
    result.append(new BytesRef(PayloadHelper.encodeInt(wordEndPos)));
    result.append(new BytesRef(PayloadHelper.encodeInt(annotId)));
    result.append(new BytesRef(PayloadHelper.encodeInt(parentId)));
    return result;
}

From source file:org.elasticsearch.action.termvector.GetTermVectorTests.java

License:Apache License

/**
 * Builds one randomly generated payload per token, grouped by token text.
 * Each token gets, with 50% probability, either an empty payload or a
 * payload in the requested encoding: 0 = float, 1 = int, 2 = a random
 * unicode string with whitespace replaced by 'w'.
 *
 * @param tokens   tokens to generate payloads for
 * @param encoding payload encoding selector (0, 1, or 2)
 * @return map from token text to its accumulated payloads
 */
private Map<String, List<BytesRef>> createPayloads(String[] tokens, int encoding) {
    Map<String, List<BytesRef>> payloads = new HashMap<String, List<BytesRef>>();
    for (String token : tokens) {
        List<BytesRef> tokenPayloads = payloads.get(token);
        if (tokenPayloads == null) {
            tokenPayloads = new ArrayList<BytesRef>();
            payloads.put(token, tokenPayloads);
        }
        if (!randomBoolean()) {
            // no payload this time — record an empty one
            tokenPayloads.add(new BytesRef());
            continue;
        }
        switch (encoding) {
        case 0:
            tokenPayloads.add(new BytesRef(PayloadHelper.encodeFloat(randomFloat())));
            break;
        case 1:
            tokenPayloads.add(new BytesRef(PayloadHelper.encodeInt(randomInt())));
            break;
        case 2: {
            String payload = randomUnicodeOfLengthBetween(50, 100);
            // Whitespace would break token-stream round-tripping; squash it to 'w'.
            for (int c = 0; c < payload.length(); c++) {
                if (Character.isWhitespace(payload.charAt(c))) {
                    payload = payload.replace(payload.charAt(c), 'w');
                }
            }
            tokenPayloads.add(new BytesRef(payload));
            break;
        }
        default:
            throw new ElasticsearchException("unsupported encoding type");
        }
    }
    return payloads;
}

From source file:org.elasticsearch.action.termvectors.GetTermVectorsIT.java

License:Apache License

/**
 * Generates one random payload per token, keyed by token text. For each
 * token a coin flip decides between an empty payload and one produced in
 * the requested encoding: 0 = random float, 1 = random int, 2 = a random
 * unicode string whose whitespace is replaced by the letter 'w'.
 *
 * @param tokens   tokens to generate payloads for
 * @param encoding payload encoding selector (0, 1, or 2)
 * @return map from token text to its accumulated payloads
 */
private Map<String, List<BytesRef>> createPayloads(String[] tokens, int encoding) {
    Map<String, List<BytesRef>> payloads = new HashMap<>();
    for (String token : tokens) {
        List<BytesRef> forToken = payloads.computeIfAbsent(token, k -> new ArrayList<BytesRef>());
        if (!randomBoolean()) {
            // no payload generated this round — store an empty marker
            forToken.add(new BytesRef());
            continue;
        }
        switch (encoding) {
        case 0:
            forToken.add(new BytesRef(PayloadHelper.encodeFloat(randomFloat())));
            break;
        case 1:
            forToken.add(new BytesRef(PayloadHelper.encodeInt(randomInt())));
            break;
        case 2: {
            String payload = randomUnicodeOfLengthBetween(50, 100);
            // Whitespace is not payload-safe here; replace each occurrence with 'w'.
            for (int c = 0; c < payload.length(); c++) {
                if (Character.isWhitespace(payload.charAt(c))) {
                    payload = payload.replace(payload.charAt(c), 'w');
                }
            }
            forToken.add(new BytesRef(payload));
            break;
        }
        default:
            throw new ElasticsearchException("unsupported encoding type");
        }
    }
    return payloads;
}

From source file:org.xbib.elasticsearch.index.analysis.skos.AbstractSKOSFilter.java

License:Apache License

/**
 * Replaces the current term (attributes) with term (attributes) from the
 * stack/* www  .  j av  a 2s  .c  om*/
 *
 * @throws IOException
 */
protected void processTermOnStack() throws IOException {
    ExpandedTerm expandedTerm = termStack.pop();

    String term = expandedTerm.getTerm();

    SKOSType termType = expandedTerm.getTermType();

    String sTerm;
    try {
        sTerm = analyze(analyzer, term, new CharsRef()).toString();
    } catch (IllegalArgumentException e) {
        // skip this term
        return;
    }

    /*
     * copies the values of all attribute implementations from this state into
     * the implementations of the target stream
     */
    restoreState(current);

    /*
     * Adds the expanded term to the term buffer
     */
    termAtt.setEmpty().append(sTerm);

    /*
     * set position increment to zero to put multiple terms into the same
     * position
     */
    posIncrAtt.setPositionIncrement(0);

    /*
     * sets the type of the expanded term (pref, alt, broader, narrower, etc.)
     */
    skosAtt.setSkosType(termType);

    /*
     * converts the SKOS Attribute to a payload, which is propagated to the
     * index
     */
    byte[] bytes = PayloadHelper.encodeInt(skosAtt.getSkosType().ordinal());
    payloadAtt.setPayload(new BytesRef(bytes));
}