Example usage for org.apache.lucene.util.automaton Operations DEFAULT_MAX_DETERMINIZED_STATES

List of usage examples for org.apache.lucene.util.automaton Operations DEFAULT_MAX_DETERMINIZED_STATES

Introduction

In this page you can find the example usage for org.apache.lucene.util.automaton Operations DEFAULT_MAX_DETERMINIZED_STATES.

Prototype

int DEFAULT_MAX_DETERMINIZED_STATES

To view the source code for org.apache.lucene.util.automaton Operations DEFAULT_MAX_DETERMINIZED_STATES, click the Source Link below.

Click Source Link

Document

Default maximum number of states that Operations#determinize should create.

Usage

From source file:com.qwazr.search.query.RegexpQuery.java

License:Apache License

/**
 * Builds the Lucene {@link org.apache.lucene.search.RegexpQuery} for this query definition,
 * substituting Lucene defaults for any optional parameter left unset.
 */
@Override
final public Query getQuery(QueryContext queryContext) throws IOException {
    // Fall back to Lucene's defaults when the optional settings were not provided.
    final int resolvedFlags = flags == null ? RegExp.ALL : flags;
    final int maxStates = max_determinized_states == null ? Operations.DEFAULT_MAX_DETERMINIZED_STATES
            : max_determinized_states;
    return new org.apache.lucene.search.RegexpQuery(new Term(field, text), resolvedFlags, maxStates);
}

From source file:org.codelibs.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude.java

License:Apache License

/**
 * Builds the effective term automaton: the include set (regex, literal values, or
 * everything) minus the exclude set (regex or literal values), if one is configured.
 */
private Automaton toAutomaton() {
    // Start from the inclusion set: explicit regex, literal values, or match-anything.
    Automaton result;
    if (include != null) {
        result = include.toAutomaton();
    } else if (includeValues != null) {
        result = Automata.makeStringUnion(includeValues);
    } else {
        result = Automata.makeAnyString();
    }
    // Build the exclusion set once, then subtract it in a single minus() call.
    Automaton excluded = null;
    if (exclude != null) {
        excluded = exclude.toAutomaton();
    } else if (excludeValues != null) {
        excluded = Automata.makeStringUnion(excludeValues);
    }
    if (excluded != null) {
        result = Operations.minus(result, excluded, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
    }
    return result;
}

From source file:org.elasticsearch.index.mapper.completion.CompletionFieldMapperTests.java

License:Apache License

public void testFuzzyQueryType() throws Exception {
    // Minimal mapping: a single field named "completion" of type "completion".
    String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
            .startObject("completion").field("type", "completion").endObject().endObject().endObject()
            .endObject().string();

    DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
    CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) defaultMapper.mappers()
            .getMapper("completion");
    // A fuzzy query built entirely from FuzzyCompletionQuery defaults must yield
    // a FuzzyCompletionQuery instance.
    Query fuzzyQuery = completionFieldMapper.fieldType().fuzzyQuery("co",
            Fuzziness.fromEdits(FuzzyCompletionQuery.DEFAULT_MAX_EDITS),
            FuzzyCompletionQuery.DEFAULT_NON_FUZZY_PREFIX, FuzzyCompletionQuery.DEFAULT_MIN_FUZZY_LENGTH,
            Operations.DEFAULT_MAX_DETERMINIZED_STATES, FuzzyCompletionQuery.DEFAULT_TRANSPOSITIONS,
            FuzzyCompletionQuery.DEFAULT_UNICODE_AWARE);
    assertThat(fuzzyQuery, instanceOf(FuzzyCompletionQuery.class));
}

From source file:org.elasticsearch.index.mapper.completion.CompletionFieldMapperTests.java

License:Apache License

public void testRegexQueryType() throws Exception {
    // Minimal mapping: a single field named "completion" of type "completion".
    String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
            .startObject("completion").field("type", "completion").endObject().endObject().endObject()
            .endObject().string();

    DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
    CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) defaultMapper.mappers()
            .getMapper("completion");
    // A regexp query with default flags and the default state cap must yield
    // a RegexCompletionQuery instance.
    Query regexQuery = completionFieldMapper.fieldType().regexpQuery(new BytesRef("co"), RegExp.ALL,
            Operations.DEFAULT_MAX_DETERMINIZED_STATES);
    assertThat(regexQuery, instanceOf(RegexCompletionQuery.class));
}

From source file:org.elasticsearch.index.mapper.CompletionFieldMapperTests.java

License:Apache License

public void testFuzzyQueryType() throws Exception {
    // Minimal mapping: a single field named "completion" of type "completion".
    String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
            .startObject("completion").field("type", "completion").endObject().endObject().endObject()
            .endObject().string();

    DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1",
            new CompressedXContent(mapping));
    CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) defaultMapper.mappers()
            .getMapper("completion");
    // A fuzzy query built entirely from FuzzyCompletionQuery defaults must yield
    // a FuzzyCompletionQuery instance.
    Query fuzzyQuery = completionFieldMapper.fieldType().fuzzyQuery("co",
            Fuzziness.fromEdits(FuzzyCompletionQuery.DEFAULT_MAX_EDITS),
            FuzzyCompletionQuery.DEFAULT_NON_FUZZY_PREFIX, FuzzyCompletionQuery.DEFAULT_MIN_FUZZY_LENGTH,
            Operations.DEFAULT_MAX_DETERMINIZED_STATES, FuzzyCompletionQuery.DEFAULT_TRANSPOSITIONS,
            FuzzyCompletionQuery.DEFAULT_UNICODE_AWARE);
    assertThat(fuzzyQuery, instanceOf(FuzzyCompletionQuery.class));
}

From source file:org.elasticsearch.index.mapper.CompletionFieldMapperTests.java

License:Apache License

public void testRegexQueryType() throws Exception {
    // Minimal mapping: a single field named "completion" of type "completion".
    String mapping = jsonBuilder().startObject().startObject("type1").startObject("properties")
            .startObject("completion").field("type", "completion").endObject().endObject().endObject()
            .endObject().string();

    DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1",
            new CompressedXContent(mapping));
    CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) defaultMapper.mappers()
            .getMapper("completion");
    // A regexp query with default flags and the default state cap must yield
    // a RegexCompletionQuery instance.
    Query regexQuery = completionFieldMapper.fieldType().regexpQuery(new BytesRef("co"), RegExp.ALL,
            Operations.DEFAULT_MAX_DETERMINIZED_STATES);
    assertThat(regexQuery, instanceOf(RegexCompletionQuery.class));
}

From source file:org.elasticsearch.index.reindex.TransportReindexAction.java

License:Apache License

/**
 * Builds the {@link CharacterRunAutomaton} that represents the reindex-from-remote
 * whitelist, and refuses any whitelist that would match every address.
 *
 * @param whitelist the configured host patterns; empty means "allow nothing"
 * @throws IllegalArgumentException if the patterns collectively accept all strings
 */
static CharacterRunAutomaton buildRemoteWhitelist(List<String> whitelist) {
    // An empty whitelist matches nothing at all.
    if (whitelist.isEmpty()) {
        return new CharacterRunAutomaton(Automata.makeEmpty());
    }
    Automaton allowed = Regex.simpleMatchToAutomaton(whitelist.toArray(Strings.EMPTY_ARRAY));
    allowed = MinimizationOperations.minimize(allowed, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
    // A total automaton would whitelist the whole world — refuse to start.
    if (Operations.isTotal(allowed)) {
        throw new IllegalArgumentException("Refusing to start because whitelist " + whitelist
                + " accepts all addresses. "
                + "This would allow users to reindex-from-remote any URL they like effectively having Elasticsearch make HTTP GETs "
                + "for them.");
    }
    return new CharacterRunAutomaton(allowed);
}

From source file:org.elasticsearch.xpack.core.security.authz.accesscontrol.FieldSubsetReaderTests.java

License:Open Source License

// Exercises FieldSubsetReader.filter() with include and exclude automatons against
// flat maps, nested maps, and arrays of maps. Each section builds an input map,
// filters it, and compares the result against an expected structure.
public void testSourceFiltering() {
    // include on top-level value
    Map<String, Object> map = new HashMap<>();
    map.put("foo", 3);
    map.put("bar", "baz");

    CharacterRunAutomaton include = new CharacterRunAutomaton(Automata.makeString("foo"));
    Map<String, Object> filtered = FieldSubsetReader.filter(map, include, 0);
    Map<String, Object> expected = new HashMap<>();
    expected.put("foo", 3);

    assertEquals(expected, filtered);

    // include on inner wildcard
    map = new HashMap<>();
    Map<String, Object> subMap = new HashMap<>();
    subMap.put("bar", 42);
    subMap.put("baz", 6);
    map.put("foo", subMap);
    map.put("bar", "baz");

    include = new CharacterRunAutomaton(Automatons.patterns("foo.*"));
    filtered = FieldSubsetReader.filter(map, include, 0);
    expected = new HashMap<>();
    expected.put("foo", subMap);

    assertEquals(expected, filtered);

    // include on leading wildcard
    include = new CharacterRunAutomaton(Automatons.patterns("*.bar"));
    filtered = FieldSubsetReader.filter(map, include, 0);
    expected = new HashMap<>();
    subMap = new HashMap<>();
    subMap.put("bar", 42);
    expected.put("foo", subMap);

    assertEquals(expected, filtered);

    // include on inner value
    // NOTE: reuses the `expected` built for the leading-wildcard case — "foo.bar"
    // and "*.bar" select the same subset of this input.
    include = new CharacterRunAutomaton(Automatons.patterns("foo.bar"));
    filtered = FieldSubsetReader.filter(map, include, 0);

    assertEquals(expected, filtered);

    // exclude on exact value
    // Exclusion is expressed as (any string) minus (the excluded pattern).
    include = new CharacterRunAutomaton(Operations.minus(Automata.makeAnyString(),
            Automatons.patterns("foo.bar"), Operations.DEFAULT_MAX_DETERMINIZED_STATES));
    filtered = FieldSubsetReader.filter(map, include, 0);
    expected = new HashMap<>();
    expected.put("bar", "baz");
    expected.put("foo", Collections.singletonMap("baz", 6));

    assertEquals(expected, filtered);

    // exclude on wildcard
    include = new CharacterRunAutomaton(Operations.minus(Automata.makeAnyString(), Automatons.patterns("foo.*"),
            Operations.DEFAULT_MAX_DETERMINIZED_STATES));
    filtered = FieldSubsetReader.filter(map, include, 0);
    expected = Collections.singletonMap("bar", "baz");

    assertEquals(expected, filtered);

    // include on inner array
    map = new HashMap<>();
    List<Object> subArray = new ArrayList<>();
    subMap = new HashMap<>();
    subMap.put("bar", 42);
    subMap.put("baz", "foo");
    subArray.add(subMap);
    subArray.add(12);
    map.put("foo", subArray);

    include = new CharacterRunAutomaton(Automatons.patterns("foo.bar"));
    filtered = FieldSubsetReader.filter(map, include, 0);
    expected = new HashMap<>();
    subArray = new ArrayList<>();
    subMap = new HashMap<>();
    subMap.put("bar", 42);
    subArray.add(subMap);
    expected.put("foo", subArray);

    assertEquals(expected, filtered);

    // include on inner array 2
    // Matching "foo" alone keeps only the non-map array elements.
    include = new CharacterRunAutomaton(Automatons.patterns("foo"));
    filtered = FieldSubsetReader.filter(map, include, 0);
    expected = new HashMap<>();
    subArray = new ArrayList<>();
    subArray.add(12);
    expected.put("foo", subArray);

    assertEquals(expected, filtered);

    // exclude on inner array
    include = new CharacterRunAutomaton(Operations.minus(Automata.makeAnyString(),
            Automatons.patterns("foo.baz"), Operations.DEFAULT_MAX_DETERMINIZED_STATES));
    filtered = FieldSubsetReader.filter(map, include, 0);
    expected = new HashMap<>();
    subArray = new ArrayList<>();
    subMap = new HashMap<>();
    subMap.put("bar", 42);
    subArray.add(subMap);
    subArray.add(12);
    expected.put("foo", subArray);

    assertEquals(expected, filtered);

    // exclude on inner array 2
    // Excluding the bare "foo" key still keeps the nested objects intact but
    // drops the scalar element.
    include = new CharacterRunAutomaton(Operations.minus(Automata.makeAnyString(), Automatons.patterns("foo"),
            Operations.DEFAULT_MAX_DETERMINIZED_STATES));
    filtered = FieldSubsetReader.filter(map, include, 0);
    expected = new HashMap<>();
    subArray = new ArrayList<>();
    subMap = new HashMap<>();
    subMap.put("bar", 42);
    subMap.put("baz", "foo");
    subArray.add(subMap);
    expected.put("foo", subArray);

    assertEquals(expected, filtered);

    // json array objects that have no matching fields should be left empty instead of being removed:
    // (otherwise nested inner hit source filtering fails with AOOB)
    map = new HashMap<>();
    map.put("foo", "value");
    List<Map<?, ?>> values = new ArrayList<>();
    values.add(Collections.singletonMap("foo", "1"));
    values.add(Collections.singletonMap("baz", "2"));
    map.put("bar", values);

    include = new CharacterRunAutomaton(Automatons.patterns("bar.baz"));
    filtered = FieldSubsetReader.filter(map, include, 0);

    expected = new HashMap<>();
    expected.put("bar", Arrays.asList(new HashMap<>(), Collections.singletonMap("baz", "2")));
    assertEquals(expected, filtered);
}

From source file:org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions.java

License:Open Source License

/**
 * Computes the automaton of fields a user may access: the granted set (plus all
 * metadata fields) minus the denied set. The denied set must be a subset of the
 * granted set; otherwise an {@link ElasticsearchSecurityException} is thrown.
 */
private static Automaton initializePermittedFieldsAutomaton(final String[] grantedFields,
        final String[] deniedFields) {
    // Granted side: a null list, or any match-all pattern, grants everything.
    Automaton granted;
    if (grantedFields == null || Arrays.stream(grantedFields).anyMatch(Regex::isMatchAllPattern)) {
        granted = Automatons.MATCH_ALL;
    } else {
        // Always permit metadata fields (anything starting with '_'), including
        // join fields created by the _parent field such as _parent#type.
        Automaton metaFields = Operations.concatenate(Automata.makeChar('_'), Automata.makeAnyString());
        granted = Operations.union(Automatons.patterns(grantedFields), metaFields);
    }

    // Denied side: no entries means nothing is denied.
    Automaton denied = (deniedFields == null || deniedFields.length == 0) ? Automatons.EMPTY
            : Automatons.patterns(deniedFields);

    granted = MinimizationOperations.minimize(granted, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
    denied = MinimizationOperations.minimize(denied, Operations.DEFAULT_MAX_DETERMINIZED_STATES);

    // Denied fields only make sense as exceptions carved out of the granted set.
    if (subsetOf(denied, granted) == false) {
        throw new ElasticsearchSecurityException("Exceptions for field permissions must be a subset of the "
                + "granted fields but " + Strings.arrayToCommaDelimitedString(deniedFields)
                + " is not a subset of " + Strings.arrayToCommaDelimitedString(grantedFields));
    }

    return Automatons.minusAndMinimize(granted, denied);
}

From source file:org.elasticsearch.xpack.security.authz.store.FileRolesStoreTests.java

License:Open Source License

// Parses the roles.yml fixture and verifies each role's cluster privileges, index
// groups, run-as permissions, field-level security, and query settings. The
// assertions are tightly coupled to the fixture's contents and ordering.
public void testParseFile() throws Exception {
    Path path = getDataPath("roles.yml");
    Map<String, RoleDescriptor> roles = FileRolesStore.parseFile(path, logger,
            Settings.builder().put(XPackSettings.DLS_FLS_ENABLED.getKey(), true).build(),
            new XPackLicenseState(Settings.EMPTY));
    assertThat(roles, notNullValue());
    assertThat(roles.size(), is(9));

    // role1: full cluster access, two index groups, no run-as.
    RoleDescriptor descriptor = roles.get("role1");
    assertNotNull(descriptor);
    Role role = Role.builder(descriptor, null).build();
    assertThat(role, notNullValue());
    assertThat(role.names(), equalTo(new String[] { "role1" }));
    assertThat(role.cluster(), notNullValue());
    assertThat(role.cluster().privilege(), is(ClusterPrivilege.ALL));
    assertThat(role.indices(), notNullValue());
    assertThat(role.indices().groups(), notNullValue());
    assertThat(role.indices().groups().length, is(2));
    assertThat(role.runAs(), is(RunAsPermission.NONE));

    // role1, group 0: READ on idx1 and idx2.
    IndicesPermission.Group group = role.indices().groups()[0];
    assertThat(group.indices(), notNullValue());
    assertThat(group.indices().length, is(2));
    assertThat(group.indices()[0], equalTo("idx1"));
    assertThat(group.indices()[1], equalTo("idx2"));
    assertThat(group.privilege(), notNullValue());
    assertThat(group.privilege(), is(IndexPrivilege.READ));

    // role1, group 1: idx3 with a privilege covering at least READ and WRITE.
    group = role.indices().groups()[1];
    assertThat(group.indices(), notNullValue());
    assertThat(group.indices().length, is(1));
    assertThat(group.indices()[0], equalTo("idx3"));
    assertThat(group.privilege(), notNullValue());
    assertTrue(Operations.subsetOf(IndexPrivilege.READ.getAutomaton(), group.privilege().getAutomaton()));
    assertTrue(Operations.subsetOf(IndexPrivilege.WRITE.getAutomaton(), group.privilege().getAutomaton()));

    // role1.ab: dotted role name parses; cluster access but no index groups.
    descriptor = roles.get("role1.ab");
    assertNotNull(descriptor);
    role = Role.builder(descriptor, null).build();
    assertThat(role, notNullValue());
    assertThat(role.names(), equalTo(new String[] { "role1.ab" }));
    assertThat(role.cluster(), notNullValue());
    assertThat(role.cluster().privilege(), is(ClusterPrivilege.ALL));
    assertThat(role.indices(), notNullValue());
    assertThat(role.indices().groups(), notNullValue());
    assertThat(role.indices().groups().length, is(0));
    assertThat(role.runAs(), is(RunAsPermission.NONE));

    // role2: cluster-only role; its automaton matches ClusterPrivilege.ALL.
    descriptor = roles.get("role2");
    assertNotNull(descriptor);
    role = Role.builder(descriptor, null).build();
    assertThat(role, notNullValue());
    assertThat(role.names(), equalTo(new String[] { "role2" }));
    assertThat(role.cluster(), notNullValue());
    assertTrue(Operations.sameLanguage(role.cluster().privilege().getAutomaton(),
            ClusterPrivilege.ALL.getAutomaton()));
    assertThat(role.indices(), notNullValue());
    assertThat(role.indices(), is(IndicesPermission.NONE));
    assertThat(role.runAs(), is(RunAsPermission.NONE));

    // role3: one index group using a regex index pattern.
    descriptor = roles.get("role3");
    assertNotNull(descriptor);
    role = Role.builder(descriptor, null).build();
    assertThat(role, notNullValue());
    assertThat(role.names(), equalTo(new String[] { "role3" }));
    assertThat(role.cluster(), notNullValue());
    assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE));
    assertThat(role.indices(), notNullValue());
    assertThat(role.indices().groups(), notNullValue());
    assertThat(role.indices().groups().length, is(1));
    assertThat(role.runAs(), is(RunAsPermission.NONE));

    // role3's group privilege is exactly READ ∪ WRITE (compared by language).
    group = role.indices().groups()[0];
    assertThat(group.indices(), notNullValue());
    assertThat(group.indices().length, is(1));
    assertThat(group.indices()[0], equalTo("/.*_.*/"));
    assertThat(group.privilege(), notNullValue());
    assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(),
            MinimizationOperations.minimize(
                    Operations.union(IndexPrivilege.READ.getAutomaton(), IndexPrivilege.WRITE.getAutomaton()),
                    Operations.DEFAULT_MAX_DETERMINIZED_STATES)));

    // role4: present in the file but invalid, so it must not be loaded.
    descriptor = roles.get("role4");
    assertNull(descriptor);

    // role_run_as: no cluster/index permissions; may run as user1 and user2 only.
    descriptor = roles.get("role_run_as");
    assertNotNull(descriptor);
    role = Role.builder(descriptor, null).build();
    assertThat(role, notNullValue());
    assertThat(role.names(), equalTo(new String[] { "role_run_as" }));
    assertThat(role.cluster(), notNullValue());
    assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE));
    assertThat(role.indices(), is(IndicesPermission.NONE));
    assertThat(role.runAs(), notNullValue());
    assertThat(role.runAs().check("user1"), is(true));
    assertThat(role.runAs().check("user2"), is(true));
    assertThat(role.runAs().check("user" + randomIntBetween(3, 9)), is(false));

    // role_run_as1: identical run-as semantics under a different role name.
    descriptor = roles.get("role_run_as1");
    assertNotNull(descriptor);
    role = Role.builder(descriptor, null).build();
    assertThat(role, notNullValue());
    assertThat(role.names(), equalTo(new String[] { "role_run_as1" }));
    assertThat(role.cluster(), notNullValue());
    assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE));
    assertThat(role.indices(), is(IndicesPermission.NONE));
    assertThat(role.runAs(), notNullValue());
    assertThat(role.runAs().check("user1"), is(true));
    assertThat(role.runAs().check("user2"), is(true));
    assertThat(role.runAs().check("user" + randomIntBetween(3, 9)), is(false));

    // role_fields: field-level security granting access to "foo" and "boo".
    descriptor = roles.get("role_fields");
    assertNotNull(descriptor);
    role = Role.builder(descriptor, null).build();
    assertThat(role, notNullValue());
    assertThat(role.names(), equalTo(new String[] { "role_fields" }));
    assertThat(role.cluster(), notNullValue());
    assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE));
    assertThat(role.runAs(), is(RunAsPermission.NONE));
    assertThat(role.indices(), notNullValue());
    assertThat(role.indices().groups(), notNullValue());
    assertThat(role.indices().groups().length, is(1));

    group = role.indices().groups()[0];
    assertThat(group.indices(), notNullValue());
    assertThat(group.indices().length, is(1));
    assertThat(group.indices()[0], equalTo("field_idx"));
    assertThat(group.privilege(), notNullValue());
    assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton()));
    assertTrue(group.getFieldPermissions().grantsAccessTo("foo"));
    assertTrue(group.getFieldPermissions().grantsAccessTo("boo"));
    assertTrue(group.getFieldPermissions().hasFieldLevelSecurity());

    // role_query: document-level security (a query) without field-level security.
    descriptor = roles.get("role_query");
    assertNotNull(descriptor);
    role = Role.builder(descriptor, null).build();
    assertThat(role, notNullValue());
    assertThat(role.names(), equalTo(new String[] { "role_query" }));
    assertThat(role.cluster(), notNullValue());
    assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE));
    assertThat(role.runAs(), is(RunAsPermission.NONE));
    assertThat(role.indices(), notNullValue());
    assertThat(role.indices().groups(), notNullValue());
    assertThat(role.indices().groups().length, is(1));

    group = role.indices().groups()[0];
    assertThat(group.indices(), notNullValue());
    assertThat(group.indices().length, is(1));
    assertThat(group.indices()[0], equalTo("query_idx"));
    assertThat(group.privilege(), notNullValue());
    assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton()));
    assertFalse(group.getFieldPermissions().hasFieldLevelSecurity());
    assertThat(group.getQuery(), notNullValue());

    // role_query_fields: both document-level and field-level security together.
    descriptor = roles.get("role_query_fields");
    assertNotNull(descriptor);
    role = Role.builder(descriptor, null).build();
    assertThat(role, notNullValue());
    assertThat(role.names(), equalTo(new String[] { "role_query_fields" }));
    assertThat(role.cluster(), notNullValue());
    assertThat(role.cluster(), is(ClusterPermission.SimpleClusterPermission.NONE));
    assertThat(role.runAs(), is(RunAsPermission.NONE));
    assertThat(role.indices(), notNullValue());
    assertThat(role.indices().groups(), notNullValue());
    assertThat(role.indices().groups().length, is(1));

    group = role.indices().groups()[0];
    assertThat(group.indices(), notNullValue());
    assertThat(group.indices().length, is(1));
    assertThat(group.indices()[0], equalTo("query_fields_idx"));
    assertThat(group.privilege(), notNullValue());
    assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton()));
    assertTrue(group.getFieldPermissions().grantsAccessTo("foo"));
    assertTrue(group.getFieldPermissions().grantsAccessTo("boo"));
    assertTrue(group.getFieldPermissions().hasFieldLevelSecurity());
    assertThat(group.getQuery(), notNullValue());
}