Example usage for org.apache.commons.lang StringUtils countMatches

List of usage examples for org.apache.commons.lang StringUtils countMatches

Introduction

On this page you can find example usages of org.apache.commons.lang StringUtils countMatches.

Prototype

public static int countMatches(String str, String sub) 

Source Link

Document

Counts how many times the substring appears in the larger String.

Usage

From source file:org.apache.directory.studio.test.integration.ui.EntryEditorTest.java

/**
 * Test adding, editing and deleting of attributes in the entry editor.
 *
 * @throws Exception/* w w  w. j ava 2 s.  c  o  m*/
 *             the exception
 */
@Test
public void testAddEditDeleteAttribute() throws Exception {
    browserViewBot.selectEntry("DIT", "Root DSE", "ou=system", "ou=users", "cn=Barbara Jensen");

    EntryEditorBot entryEditorBot = studioBot.getEntryEditorBot("cn=Barbara Jensen,ou=users,ou=system");
    entryEditorBot.activate();
    String dn = entryEditorBot.getDnText();
    assertEquals("DN: cn=Barbara Jensen,ou=users,ou=system", dn);
    assertEquals(8, entryEditorBot.getAttributeValues().size());
    assertEquals("", modificationLogsViewBot.getModificationLogsText());

    // add description attribute
    entryEditorBot.activate();
    NewAttributeWizardBot wizardBot = entryEditorBot.openNewAttributeWizard();
    assertTrue(wizardBot.isVisible());
    wizardBot.typeAttributeType("description");
    wizardBot.clickFinishButton();
    entryEditorBot.typeValueAndFinish("This is the 1st description.");
    assertEquals(9, entryEditorBot.getAttributeValues().size());
    assertTrue(entryEditorBot.getAttributeValues().contains("description: This is the 1st description."));
    modificationLogsViewBot.waitForText("add: description\ndescription: This is the 1st description.");

    // add second value
    entryEditorBot.activate();
    entryEditorBot.addValue("description");
    entryEditorBot.typeValueAndFinish("This is the 2nd description.");
    assertEquals(10, entryEditorBot.getAttributeValues().size());
    assertTrue(entryEditorBot.getAttributeValues().contains("description: This is the 1st description."));
    assertTrue(entryEditorBot.getAttributeValues().contains("description: This is the 2nd description."));
    modificationLogsViewBot.waitForText("add: description\ndescription: This is the 2nd description.");

    // edit second value
    entryEditorBot.editValue("description", "This is the 2nd description.");
    entryEditorBot.typeValueAndFinish("This is the 3rd description.");
    assertEquals(10, entryEditorBot.getAttributeValues().size());
    assertEquals(10, entryEditorBot.getAttributeValues().size());
    assertTrue(entryEditorBot.getAttributeValues().contains("description: This is the 1st description."));
    assertFalse(entryEditorBot.getAttributeValues().contains("description: This is the 2nd description."));
    assertTrue(entryEditorBot.getAttributeValues().contains("description: This is the 3rd description."));
    modificationLogsViewBot.waitForText("delete: description\ndescription: This is the 2nd description.");
    modificationLogsViewBot.waitForText("add: description\ndescription: This is the 3rd description.");

    // delete second value
    entryEditorBot.deleteValue("description", "This is the 3rd description.");
    assertEquals(9, entryEditorBot.getAttributeValues().size());
    assertTrue(entryEditorBot.getAttributeValues().contains("description: This is the 1st description."));
    assertFalse(entryEditorBot.getAttributeValues().contains("description: This is the 3rd description."));
    modificationLogsViewBot.waitForText("delete: description\ndescription: This is the 3rd description.");

    // edit 1st value
    entryEditorBot.editValue("description", "This is the 1st description.");
    entryEditorBot.typeValueAndFinish("This is the final description.");
    assertEquals(9, entryEditorBot.getAttributeValues().size());
    assertFalse(entryEditorBot.getAttributeValues().contains("description: This is the 1st description."));
    assertTrue(entryEditorBot.getAttributeValues().contains("description: This is the final description."));
    modificationLogsViewBot.waitForText("replace: description\ndescription: This is the final description.");

    // delete 1st value/attribute
    entryEditorBot.deleteValue("description", "This is the final description.");
    assertEquals(8, entryEditorBot.getAttributeValues().size());
    assertFalse(entryEditorBot.getAttributeValues().contains("description: This is the final description."));
    modificationLogsViewBot.waitForText("delete: description\n-");

    assertEquals("Expected 6 modifications.", 6,
            StringUtils.countMatches(modificationLogsViewBot.getModificationLogsText(), "#!RESULT OK"));
}

From source file:org.apache.directory.studio.test.integration.ui.EntryEditorTest.java

/**
 * DIRSTUDIO-483: DN Editor escapes all non-ascii characters.
 *
 * Adds a member attribute whose DN value contains escaped special characters
 * and non-ascii characters, then verifies the value round-trips unchanged
 * through the server and the DN editor.
 *
 * @throws Exception
 *             the exception
 */
@Test
public void testDnValueEditor() throws Exception {
    browserViewBot.selectEntry("DIT", "Root DSE", "ou=system", "ou=groups", "cn=My Group");

    EntryEditorBot entryEditorBot = studioBot.getEntryEditorBot("cn=My Group,ou=groups,ou=system");
    entryEditorBot.activate();
    String dn = entryEditorBot.getDnText();
    assertEquals("DN: cn=My Group,ou=groups,ou=system", dn);
    assertEquals(4, entryEditorBot.getAttributeValues().size());

    // add member attribute via the DN editor, picking a DN with escaped
    // special characters (#, \, +, ,, ") and non-ascii characters
    NewAttributeWizardBot wizardBot = entryEditorBot.openNewAttributeWizard();
    assertTrue(wizardBot.isVisible());
    wizardBot.typeAttributeType("member");
    DnEditorDialogBot dnEditorBot = wizardBot.clickFinishButtonExpectingDnEditor();
    assertTrue(dnEditorBot.isVisible());
    SelectDnDialogBot selectDnBot = dnEditorBot.clickBrowseButtonExpectingSelectDnDialog();
    assertTrue(selectDnBot.isVisible());
    selectDnBot.selectEntry("Root DSE", "ou=system", "ou=users", "cn=\\#\\\\\\+\\, \\\"\u00F6\u00E9\\\"");
    selectDnBot.clickOkButton();
    dnEditorBot.activate();
    assertEquals("cn=\\#\\\\\\+\\, \\\"\u00F6\u00E9\\\",ou=users,ou=system", dnEditorBot.getDnText());
    dnEditorBot.clickOkButton();

    // assert value after saved and reloaded from server
    SWTUtils.sleep(1000);
    assertEquals(5, entryEditorBot.getAttributeValues().size());
    assertTrue(entryEditorBot.getAttributeValues()
            .contains("member: cn=\\#\\\\\\+\\, \\\"\u00F6\u00E9\\\",ou=users,ou=system"));
    dnEditorBot = entryEditorBot.editValueExpectingDnEditor("member",
            "cn=\\#\\\\\\+\\, \\\"\u00F6\u00E9\\\",ou=users,ou=system");
    assertEquals("cn=\\#\\\\\\+\\, \\\"\u00F6\u00E9\\\",ou=users,ou=system", dnEditorBot.getDnText());
    dnEditorBot.clickCancelButton();

    // exactly one modification should have reached the server
    modificationLogsViewBot.waitForText("#!RESULT OK");
    assertEquals("Expected 1 modification.", 1,
            StringUtils.countMatches(modificationLogsViewBot.getModificationLogsText(), "#!RESULT OK"));
}

From source file:org.apache.falcon.regression.core.enumsAndConstants.FeedType.java

/**
 * Returns the directory depth of this feed type's path, i.e. the number of
 * {@code '/'} separators occurring in {@code pathValue}.
 *
 * @return the count of path separators in {@code pathValue}
 */
public int getDirDepth() {
    final String separator = "/";
    return StringUtils.countMatches(pathValue, separator);
}

From source file:org.apache.gobblin.compaction.parser.CompactionPathParser.java

/**
 * Parses a compaction-path time string of the form {@code YYYY/MM/dd} or
 * {@code YYYY/MM/dd/HH} into a {@link DateTime} in the default compaction
 * time zone.
 *
 * @param timeString the time portion of a compaction path,
 *                   e.g. {@code 2020/01/31} or {@code 2020/01/31/23}
 * @return the parsed time
 * @throws IllegalArgumentException if the string does not contain exactly
 *         2 or 3 {@code '/'} separators
 */
private DateTime getTime(String timeString) {
    DateTimeZone timeZone = DateTimeZone.forID(MRCompactor.DEFAULT_COMPACTION_TIMEZONE);
    int splits = StringUtils.countMatches(timeString, "/");
    final String timePattern;
    if (splits == 3) {
        timePattern = "YYYY/MM/dd/HH";
    } else if (splits == 2) {
        timePattern = "YYYY/MM/dd";
    } else {
        // Fail fast with a descriptive message instead of falling through to
        // DateTimeFormat.forPattern("") which throws an opaque error.
        throw new IllegalArgumentException(
                "Unsupported time string format (expected YYYY/MM/dd[/HH]): " + timeString);
    }
    DateTimeFormatter timeFormatter = DateTimeFormat.forPattern(timePattern).withZone(timeZone);
    return timeFormatter.parseDateTime(timeString);
}

From source file:org.apache.hadoop.hdfs.server.namenode.INodeAttributeProvider.java

/**
 * Splits an absolute path into its non-empty components.
 * For example, {@code "/a/b/c"} yields {@code ["a", "b", "c"]}.
 *
 * @param path an absolute path (leading/trailing whitespace is trimmed)
 * @return the path components; consecutive or trailing separators produce
 *         no component (trailing slots in the array may remain null if the
 *         path contains repeated separators)
 * @throws IllegalArgumentException if the trimmed path is empty or does not
 *         start with the path separator
 */
@VisibleForTesting
String[] getPathElements(String path) {
    path = path.trim();
    // Check emptiness explicitly; charAt(0) on "" would otherwise throw an
    // unhelpful StringIndexOutOfBoundsException instead of the documented IAE.
    if (path.isEmpty() || path.charAt(0) != Path.SEPARATOR_CHAR) {
        throw new IllegalArgumentException("It must be an absolute path: " + path);
    }
    int numOfElements = StringUtils.countMatches(path, Path.SEPARATOR);
    if (path.length() > 1 && path.endsWith(Path.SEPARATOR)) {
        // A trailing separator does not introduce an extra element.
        numOfElements--;
    }
    String[] pathElements = new String[numOfElements];
    int elementIdx = 0;
    int idx = 0;
    int found = path.indexOf(Path.SEPARATOR_CHAR, idx);
    while (found > -1) {
        if (found > idx) {
            // Non-empty segment between two separators.
            pathElements[elementIdx++] = path.substring(idx, found);
        }
        idx = found + 1;
        found = path.indexOf(Path.SEPARATOR_CHAR, idx);
    }
    if (idx < path.length()) {
        // Final segment after the last separator.
        pathElements[elementIdx] = path.substring(idx);
    }
    return pathElements;
}

From source file:org.apache.hadoop.hive.metastore.hbase.stats.merge.ColumnStatsMergerFactory.java

/**
 * Counts the {@code '{'} delimiters in the given serialized statistics
 * string; per the method name, each delimiter marks one bit vector.
 *
 * @param s the serialized string; may be null
 * @return the number of {@code '{'} occurrences, or 0 when {@code s} is null
 */
private static int countNumBitVectors(String s) {
    if (s == null) {
        return 0;
    }
    return StringUtils.countMatches(s, "{");
}

From source file:org.apache.hadoop.hive.ql.exec.tez.WorkloadManager.java

/**
 * Applies a new resource plan (or disables WM when the plan is null) on the
 * master thread: rebuilds the pool hierarchy level by level, re-attaches
 * triggers, destroys pools removed by the new plan, and resizes the Tez AM
 * pool to match the new total query parallelism.
 *
 * @param e the event state carrying the resource plan to apply
 * @param syncWork accumulator for sessions to kill/restart/destroy
 * @param poolsToRedistribute output set of pool names whose sessions must be redistributed
 */
private void applyNewResourcePlanOnMasterThread(EventState e, WmThreadSyncWork syncWork,
        HashSet<String> poolsToRedistribute) {
    int totalQueryParallelism = 0;
    WMFullResourcePlan plan = e.resourcePlanToApply;
    if (plan == null) {
        // NULL plan means WM is disabled via a command; it could still be reenabled.
        LOG.info("Disabling workload management because the resource plan has been removed");
        this.rpName = null;
        this.defaultPool = null;
        this.userPoolMapping = new UserPoolMapping(null, null);
    } else {
        this.rpName = plan.getPlan().getName();
        this.defaultPool = plan.getPlan().getDefaultPoolPath();
        this.userPoolMapping = new UserPoolMapping(plan.getMappings(), defaultPool);
    }
    // Note: we assume here that plan has been validated beforehand, so we don't verify
    //       that fractions or query parallelism add up, etc.
    Map<String, PoolState> oldPools = pools;
    pools = new HashMap<>();

    ArrayList<List<WMPool>> poolsByLevel = new ArrayList<>();
    if (plan != null) {
        // For simplicity, to always have parents while storing pools in a flat structure, we'll
        // first distribute them by levels, then add level by level.
        for (WMPool pool : plan.getPools()) {
            String fullName = pool.getPoolPath();
            // Nesting depth == number of separators in the full pool path.
            int ix = StringUtils.countMatches(fullName, POOL_SEPARATOR_STR);
            while (poolsByLevel.size() <= ix) {
                poolsByLevel.add(new LinkedList<WMPool>()); // We expect all the levels to have items.
            }
            poolsByLevel.get(ix).add(pool);
        }
    }
    for (int level = 0; level < poolsByLevel.size(); ++level) {
        List<WMPool> poolsOnLevel = poolsByLevel.get(level);
        for (WMPool pool : poolsOnLevel) {
            String fullName = pool.getPoolPath();
            int qp = pool.getQueryParallelism();
            double fraction = pool.getAllocFraction();
            if (level > 0) {
                // A child's fraction is relative to its parent; convert it to an
                // absolute fraction and deduct it from the parent's remainder.
                String parentName = fullName.substring(0, fullName.lastIndexOf(POOL_SEPARATOR));
                PoolState parent = pools.get(parentName);
                fraction = parent.finalFraction * fraction;
                parent.finalFractionRemaining -= fraction;
            }
            // Reuse existing pool state when the pool survived the plan change
            // (remove() so leftovers in oldPools are the deleted pools).
            PoolState state = oldPools == null ? null : oldPools.remove(fullName);
            if (state == null) {
                state = new PoolState(fullName, qp, fraction, pool.getSchedulingPolicy(), metricsSystem);
            } else {
                // This will also take care of the queries if query parallelism changed.
                state.update(qp, fraction, syncWork, e, pool.getSchedulingPolicy());
                poolsToRedistribute.add(fullName);
            }
            state.setTriggers(new LinkedList<Trigger>());
            LOG.info("Adding Hive pool: " + state);
            pools.put(fullName, state);
            totalQueryParallelism += qp;
        }
    }
    for (PoolState pool : pools.values()) {
        if (pool.metrics != null) {
            pool.metrics
                    .setMaxExecutors(allocationManager.translateAllocationToCpus(pool.finalFractionRemaining));
        }
    }
    // TODO: in the current impl, triggers are added to RP. For tez, no pool triggers (mapping between trigger name and
    // pool name) will exist which means all triggers applies to tez. For LLAP, pool triggers has to exist for attaching
    // triggers to specific pools.
    // For usability,
    // Provide a way for triggers sharing/inheritance possibly with following modes
    // ONLY - only to pool
    // INHERIT - child pools inherit from parent
    // GLOBAL - all pools inherit
    if (plan != null && plan.isSetTriggers() && plan.isSetPoolTriggers()) {
        Map<String, Trigger> triggers = new HashMap<>();
        for (WMTrigger trigger : plan.getTriggers()) {
            ExecutionTrigger execTrigger = ExecutionTrigger.fromWMTrigger(trigger);
            triggers.put(trigger.getTriggerName(), execTrigger);
        }
        for (WMPoolTrigger poolTrigger : plan.getPoolTriggers()) {
            PoolState pool = pools.get(poolTrigger.getPool());
            Trigger trigger = triggers.get(poolTrigger.getTrigger());
            pool.triggers.add(trigger);
            poolsToRedistribute.add(pool.fullName);
            LOG.info("Adding pool " + pool.fullName + " trigger " + trigger);
        }
    }

    if (oldPools != null && !oldPools.isEmpty()) {
        // Looks like some pools were removed; kill running queries, re-queue the queued ones.
        for (PoolState oldPool : oldPools.values()) {
            oldPool.destroy(syncWork, e.getRequests, e.toReuse);
        }
    }

    LOG.info("Updating with " + totalQueryParallelism + " total query parallelism");
    int deltaSessions = totalQueryParallelism - this.totalQueryParallelism;
    this.totalQueryParallelism = totalQueryParallelism;
    if (deltaSessions == 0)
        return; // Nothing to do.
    if (deltaSessions < 0) {
        // First, see if we have sessions that we were planning to restart/kill; get rid of those.
        deltaSessions = transferSessionsToDestroy(syncWork.toKillQuery.keySet(), syncWork.toDestroyNoRestart,
                deltaSessions);
        deltaSessions = transferSessionsToDestroy(syncWork.toRestartInUse, syncWork.toDestroyNoRestart,
                deltaSessions);
    }
    if (deltaSessions != 0) {
        failOnFutureFailure(tezAmPool.resizeAsync(deltaSessions, syncWork.toDestroyNoRestart));
    }
}

From source file:org.apache.hadoop.hive.ql.session.TestClearDanglingScratchDir.java

/**
 * Exercises {@code ClearDanglingScratchDir} end to end: no scratch dirs at
 * all, a scratch dir without a lock file, a live session, then a dead
 * session cleaned up first with dry-run and then for real.
 *
 * @throws Exception the exception
 */
@Test
public void testClearDanglingScratchDir() throws Exception {

    // No scratch dir initially
    redirectStdOutErr();
    ClearDanglingScratchDir.main(new String[] { "-v", "-s",
            m_dfs.getFileSystem().getUri().toString() + scratchDir.toUri().toString() });
    rollbackStdOutErr();
    Assert.assertTrue(stderr.toString().contains("Cannot find any scratch directory to clear"));

    // Create scratch dir without lock files
    m_dfs.getFileSystem().mkdirs(new Path(new Path(scratchDir, "dummy"), UUID.randomUUID().toString()));
    redirectStdOutErr();
    ClearDanglingScratchDir.main(new String[] { "-v", "-s",
            m_dfs.getFileSystem().getUri().toString() + scratchDir.toUri().toString() });
    rollbackStdOutErr();
    // JUnit's assertEquals takes (expected, actual); the previous argument
    // order was swapped, which garbles failure messages.
    Assert.assertEquals(1, StringUtils.countMatches(stderr.toString(),
            "since it does not contain " + SessionState.LOCK_FILE_NAME));
    Assert.assertTrue(stderr.toString().contains("Cannot find any scratch directory to clear"));

    // One live session
    SessionState ss = SessionState.start(conf);
    redirectStdOutErr();
    ClearDanglingScratchDir.main(new String[] { "-v", "-s",
            m_dfs.getFileSystem().getUri().toString() + scratchDir.toUri().toString() });
    rollbackStdOutErr();
    Assert.assertEquals(1, StringUtils.countMatches(stderr.toString(), "is being used by live process"));

    // One dead session with dry-run ("-r")
    ss.releaseSessionLockFile();
    redirectStdOutErr();
    ClearDanglingScratchDir.main(new String[] { "-r", "-v", "-s",
            m_dfs.getFileSystem().getUri().toString() + scratchDir.toUri().toString() });
    rollbackStdOutErr();
    // Find one session dir to remove
    Assert.assertFalse(stdout.toString().isEmpty());

    // Remove the dead session dir
    redirectStdOutErr();
    ClearDanglingScratchDir.main(new String[] { "-v", "-s",
            m_dfs.getFileSystem().getUri().toString() + scratchDir.toUri().toString() });
    rollbackStdOutErr();
    Assert.assertTrue(stderr.toString().contains("Removing 1 scratch directories"));
    Assert.assertEquals(1, StringUtils.countMatches(stderr.toString(), "removed"));
    ss.close();
}

From source file:org.apache.hadoop.streaming.TestUnconsumedInput.java

/**
 * Verifies that a streaming job still succeeds and produces the full output
 * when "stream.minRecWrittenToEnableSkip_" is set to "0" (per the inline
 * comment, configured to ignore unconsumed input).
 *
 * @throws Exception on test failure
 */
@Test
public void testUnconsumedInput() throws Exception {
    String outFileName = "part-00000";
    File outFile = null;
    try {
        try {
            FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
        } catch (Exception e) {
            // Best-effort cleanup of leftovers from a previous run; a failed
            // delete is deliberately ignored.
        }

        createInput();

        // setup config to ignore unconsumed input
        Configuration conf = new Configuration();
        conf.set("stream.minRecWrittenToEnableSkip_", "0");

        job = new StreamJob();
        job.setConf(conf);
        int exitCode = job.run(genArgs());
        assertEquals("Job failed", 0, exitCode);
        outFile = new File(OUTPUT_DIR, outFileName).getAbsoluteFile();
        String output = StreamUtil.slurp(outFile);
        // Tab count is used as a proxy for the number of output records.
        assertEquals("Output was truncated", EXPECTED_OUTPUT_SIZE, StringUtils.countMatches(output, "\t"));
    } finally {
        INPUT_FILE.delete();
        FileUtil.fullyDelete(OUTPUT_DIR.getAbsoluteFile());
    }
}

From source file:org.apache.jackrabbit.core.query.lucene.JahiaLuceneQueryFactoryImpl.java

/**
 * Attempts to build a range query for a single "mixed" range expression,
 * i.e. one whose begin and end bounds have different inclusiveness:
 * either {@code field:{a TO b]} (exclusive begin, inclusive end) or
 * {@code field:[a TO b}} (inclusive begin, exclusive end).
 *
 * @param expression the raw query expression
 * @return the resolved range query, or {@code null} if the expression is not
 *         exactly one mixed inclusive/exclusive range on a space-free field name
 */
private Query resolveSingleMixedInclusiveExclusiveRangeQuery(String expression) {
    Query qobj = null;
    boolean inclusiveEndRange = expression.endsWith("]");
    boolean exclusiveEndRange = expression.endsWith("}");
    int inclusiveBeginRangeCount = StringUtils.countMatches(expression, "[");
    int exclusiveBeginRangeCount = StringUtils.countMatches(expression, "{");
    // Only the two mixed forms are handled, each with exactly one begin
    // bracket of the opposite kind to the end bracket. The "{"-with-"]"
    // pairing below is therefore intentional, not a typo.
    if (((inclusiveEndRange && exclusiveBeginRangeCount == 1 && inclusiveBeginRangeCount == 0)
            || (exclusiveEndRange && inclusiveBeginRangeCount == 1 && exclusiveBeginRangeCount == 0))) {
        String fieldName = (inclusiveEndRange || exclusiveEndRange)
                ? StringUtils.substringBefore(expression, inclusiveEndRange ? ":{" : ":[")
                : "";
        if (fieldName.indexOf(' ') == -1) {
            // Un-escape any escaped colon in the field name.
            fieldName = fieldName.replace("\\:", ":");
            String rangeExpression = StringUtils.substringBetween(expression, inclusiveEndRange ? "{" : "[",
                    inclusiveEndRange ? "]" : "}");
            String part1 = StringUtils.substringBefore(rangeExpression, " TO");
            String part2 = StringUtils.substringAfter(rangeExpression, "TO ");
            SchemaField sf = new SchemaField(fieldName, JahiaQueryParser.STRING_TYPE);
            // "*" denotes an open-ended bound, passed as null to getRangeQuery.
            qobj = JahiaQueryParser.STRING_TYPE.getRangeQuery(null, sf, part1.equals("*") ? null : part1,
                    part2.equals("*") ? null : part2, !inclusiveEndRange, inclusiveEndRange);
        }
    }
    return qobj;
}