Example usage for org.apache.commons.collections.bag HashBag HashBag

List of usage examples for org.apache.commons.collections.bag HashBag HashBag

Introduction

On this page you can find an example usage for the org.apache.commons.collections.bag HashBag constructor HashBag(Collection).

Prototype

public HashBag(Collection coll) 

Source Link

Document

Constructs a bag containing all the members of the given collection.

Usage

From source file:org.lockss.crawler.FuncZipExploder.java

/**
 * Crawls the simulated content and verifies the results.
 *
 * @param good when {@code true}, the crawl is expected to succeed and the
 *             full content checks are run; when {@code false}, the crawl is
 *             given a bad file name ("002file.bin"), is expected to fail,
 *             and the method returns early after asserting the failure.
 * @throws Exception if content creation or crawling fails unexpectedly.
 */
public void runTest(boolean good) throws Exception {
    log.debug3("About to create content");
    createContent();

    // get the root of the simContent
    String simDir = sau.getSimRoot();

    log.debug3("About to crawl content");
    boolean res = crawlContent(good ? null : "002file.bin");
    if (good) {
        assertTrue("Crawl failed", res);
        // Disabled: exact status checking is too brittle across crawler
        // versions; only the boolean result is asserted above.
        if (false)
            assertTrue(
                    "Crawl should succeed but got " + lastCrawlResult
                            + (lastCrawlMessage == null ? "" : " with " + lastCrawlMessage),
                    lastCrawlResult == Crawler.STATUS_SUCCESSFUL);
    } else {
        assertFalse("Crawl succeeded", res);
        // Disabled for the same reason as the success-status check above.
        if (false)
            assertTrue(
                    "Crawl should get STATUS_PLUGIN_ERROR but got " + lastCrawlResult
                            + (lastCrawlMessage == null ? "" : " with " + lastCrawlMessage),
                    lastCrawlResult == Crawler.STATUS_PLUGIN_ERROR);
        // Nothing further to verify for a deliberately failed crawl.
        return;
    }

    // read all the files links from the root of the simcontent
    // check the link level of the file and see if it contains
    // in myCUS (check if the crawler crawl within the max. depth)
    CachedUrlSet myCUS = sau.getAuCachedUrlSet();
    File dir = new File(simDir);
    if (dir.isDirectory()) {
        File f[] = dir.listFiles();
        log.debug("Checking simulated content.");
        checkThruFileTree(f, myCUS);
        log.debug("Checking simulated content done.");
        checkExplodedUrls();
        checkUnExplodedUrls();

        log.debug("Check finished.");
    } else {
        log.error("Error: The root path of the simulated" + " content [" + dir + "] is not a directory");
    }

    // Test PluginManager.getAuContentSize(), just because this is a
    // convenient place to do it.  If the simulated AU params are changed, or
    // SimulatedContentGenerator is changed, this number may have to
    // change.  NB - because the ZIP files are compressed,  their
    // size varies randomly by a small amount, so allow a tolerance.
    long expected = 2615;
    long actual = AuUtil.getAuContentSize(sau, true);
    long absError = Math.abs(expected - actual);
    assertTrue("size mismatch " + expected + " vs. " + actual, absError < 60);

    // Verify each URL was stored exactly once: put the stored-URL list into
    // a bag, subtract one occurrence of every distinct URL, and whatever
    // remains was stored more than once.
    List sbc = ((MySimulatedArchivalUnit) sau).sbc;
    Bag b = new HashBag(sbc);
    Set uniq = new HashSet(b.uniqueSet());
    for (Iterator iter = uniq.iterator(); iter.hasNext();) {
        b.remove(iter.next(), 1);
    }
    // Permission pages get checked twice.  Hard to avoid that, so allow it
    b.removeAll(sau.getPermissionUrls());
    // archives get checked twice - from checkThruFileTree & checkExplodedUrls
    b.remove("http://www.example.com/content.zip");
    // This test is screwed up by the use of shouldBeCached() in
    // ZipExploder() to find the AU to store the URL in.
    //assertEmpty("shouldBeCached() called multiple times on same URLs.", b);

}

From source file:org.lockss.crawler.FuncZipExploder2.java

/**
 * Creates simulated content, crawls it, and verifies the crawl results,
 * the exploded/unexploded URL sets, the total AU content size, and the
 * extracted DOIs.
 *
 * @throws Exception if content creation or crawling fails unexpectedly.
 */
public void testRunSelf() throws Exception {
    log.debug3("About to create content");
    createContent();

    // get the root of the simContent
    String simDir = sau.getSimRoot();

    log.debug3("About to crawl content");
    crawlContent();

    // read all the files links from the root of the simcontent
    // check the link level of the file and see if it contains
    // in myCUS (check if the crawler crawl within the max. depth)
    CachedUrlSet myCUS = sau.getAuCachedUrlSet();
    File dir = new File(simDir);
    if (dir.isDirectory()) {
        File f[] = dir.listFiles();
        log.debug("Checking simulated content.");
        checkThruFileTree(f, myCUS);
        log.debug("Checking simulated content done.");
        checkUnExplodedUrls();
        checkExplodedUrls();

        log.debug("Check finished.");
    } else {
        log.error("Error: The root path of the simulated" + " content [" + dir + "] is not a directory");
    }

    // Test PluginManager.getAuContentSize(), just because this is a
    // convenient place to do it.  If the simulated AU params are changed, or
    // SimulatedContentGenerator is changed, this number may have to
    // change.  NB - because the ZIP files are compressed,  their
    // size varies randomly by a small amount, so allow a tolerance.
    long expected = 285227;
    long actual = AuUtil.getAuContentSize(sau, true);
    long absError = Math.abs(expected - actual);
    assertTrue("size mismatch " + expected + " vs. " + actual, absError < 60);

    // Disabled: duplicate-store detection is unreliable here (see the
    // shouldBeCached() note below), so the whole bag check is switched off.
    if (false) {
        // Verify each URL was stored exactly once: subtract one occurrence
        // of every distinct URL; anything left was stored more than once.
        List sbc = ((MySimulatedArchivalUnit) sau).sbc;
        Bag b = new HashBag(sbc);
        Set uniq = new HashSet(b.uniqueSet());
        for (Iterator iter = uniq.iterator(); iter.hasNext();) {
            b.remove(iter.next(), 1);
        }
        // Permission pages get checked twice.  Hard to avoid that, so allow it
        b.removeAll(sau.getPermissionUrls());
        // archives get checked twice - from checkThruFileTree & checkExplodedUrls
        b.remove("http://www.example.com/content.zip");
        // This test is screwed up by the use of shouldBeCached() in
        // ZipExploder() to find the AU to store the URL in.
        //assertEmpty("shouldBeCached() called multiple times on same URLs.", b);
    }
    // Now check the DOIs
    checkDOIs();
}

From source file:org.marketcetera.util.test.CollectionAssert.java

/**
 * Asserts that the two given arrays are permutations of each
 * other. This assertion holds if both arrays are null, or if they
 * have one or more (but an equal number of) null elements. If the
 * assertion does not hold, the {@link AssertionError} thrown
 * starts with the given message, which may be null if no such
 * custom message prefix is desired.
 *
 * @param message The identifying message.
 * @param expected The expected array.
 * @param actual The actual array.
 */

public static <T> void assertArrayPermutation(String message, T[] expected, T[] actual) {
    // Two null arrays are trivially permutations of each other.
    if ((expected == null) && (actual == null)) {
        return;
    }
    // 'content' stays null while no mismatch has been found; once set it
    // describes the failure and the method falls through to fail().
    String content = null;
    if (expected == null) {
        content = "expected array is null but actual is not"; //$NON-NLS-1$
    } else if (actual == null) {
        content = "actual array is null but expected is not"; //$NON-NLS-1$
    } else if (expected.getClass() != actual.getClass()) {
        // Arrays of different runtime component types can never match.
        content = "expected array class is " + //$NON-NLS-1$
                expected.getClass().getName() + " but actual array class is " + //$NON-NLS-1$
                actual.getClass().getName();
    } else {
        // Multiset comparison: remove one occurrence of each expected
        // element from the actual bag; a failed removal means 'actual'
        // lacks that element (or has too few copies of it).
        Bag expectedBag = new HashBag(Arrays.asList(expected));
        Bag actualBag = new HashBag(Arrays.asList(actual));
        for (Object e : expectedBag) {
            if (!actualBag.remove(e, 1)) {
                content = "actual is missing '" + //$NON-NLS-1$
                        e + "'"; //$NON-NLS-1$
                break;
            }
        }
        if (content == null) {
            // Every expected element was matched; the arrays are
            // permutations iff nothing is left over in 'actual'.
            if (actualBag.isEmpty()) {
                return;
            }
            content = "actual contains extra elements such as " + //$NON-NLS-1$
                    actualBag.iterator().next();
        }
    }
    if (message != null) {
        content = message + " " + content; //$NON-NLS-1$
    }
    fail(content);
}