Example usage for org.apache.commons.collections Predicate

List of usage examples for the org.apache.commons.collections Predicate interface

Introduction

On this page you can find example usages of the org.apache.commons.collections Predicate interface.

Prototype

Predicate

Source Link

Usage

From source file:com.cyberway.issue.crawler.extractor.ExtractorHTMLTest.java

/**
 * Test if scheme is maintained by speculative hops onto exact 
 * same host/*  w  w w  .  j  av  a 2 s.  co  m*/
 * 
 * [HER-1524] speculativeFixup in ExtractorJS should maintain URL scheme
 */
public void testSpeculativeLinkExtraction() throws URIException {
    CrawlURI curi = new CrawlURI(UURIFactory.getInstance("https://www.example.com"));
    CharSequence cs = "<script type=\"text/javascript\">_parameter=\"www.anotherexample.com\";"
            + "_anotherparameter=\"www.example.com/index.html\"" + ";</script>";
    this.extractor.extract(curi, cs);

    assertTrue(CollectionUtils.exists(curi.getOutLinks(), new Predicate() {
        public boolean evaluate(Object object) {
            return ((Link) object).getDestination().toString().equals("http://www.anotherexample.com/");
        }
    }));
    assertTrue(CollectionUtils.exists(curi.getOutLinks(), new Predicate() {
        public boolean evaluate(Object object) {
            return ((Link) object).getDestination().toString().equals("https://www.example.com/index.html");
        }
    }));
}

From source file:com.linkedin.pinot.common.query.gen.AvroQueryGenerator.java

/**
 * Unwraps an Avro union schema to its first non-NULL branch (the common
 * ["null", &lt;type&gt;] nullable-field pattern); non-union schemas are
 * returned unchanged. Returns null if the union has no non-NULL branch.
 */
private static org.apache.avro.Schema extractSchemaFromUnionIfNeeded(org.apache.avro.Schema fieldSchema) {
    if (fieldSchema.getType() != Type.UNION) {
        return fieldSchema;
    }
    // First branch whose type is not NULL wins, mirroring a linear find.
    org.apache.avro.Schema unwrapped = null;
    for (org.apache.avro.Schema candidate : fieldSchema.getTypes()) {
        if (candidate.getType() != Type.NULL) {
            unwrapped = candidate;
            break;
        }
    }
    return unwrapped;
}

From source file:it.unimi.dsi.sux4j.mph.TwoStepsGOV3Function.java

/** Creates a new two-step function for the given keys and values.
 *
 * @param keys the keys in the domain of the function.
 * @param transform a transformation strategy for the keys.
 * @param values values to be assigned to each key, in the same order of the iterator returned by <code>keys</code>; if {@code null}, the
 * assigned value will be the ordinal number of each key.
 * @param tempDir a temporary directory for the store files, or {@code null} for the standard temporary directory.
 * @param chunkedHashStore a chunked hash store containing the keys associated with their rank, or {@code null}; the store
 * can be unchecked, but in this case <code>keys</code> and <code>transform</code> must be non-{@code null}. 
 */
protected TwoStepsGOV3Function(final Iterable<? extends T> keys,
        final TransformationStrategy<? super T> transform, final LongBigList values, final File tempDir,
        ChunkedHashStore<T> chunkedHashStore) throws IOException {
    // NOTE(review): tempDir is never used in this constructor — confirm whether it
    // should be forwarded to the ChunkedHashStore created below.
    this.transform = transform;
    final ProgressLogger pl = new ProgressLogger(LOGGER);
    pl.displayLocalSpeed = true;
    pl.displayFreeMemory = true;
    final RandomGenerator random = new XorShift1024StarRandomGenerator();
    pl.itemsName = "keys";

    // Build a chunked hash store from the keys when the caller did not provide one.
    final boolean givenChunkedHashStore = chunkedHashStore != null;
    if (!givenChunkedHashStore) {
        if (keys == null)
            throw new IllegalArgumentException(
                    "If you do not provide a chunked hash store, you must provide the keys");
        chunkedHashStore = new ChunkedHashStore<T>(transform, pl);
        chunkedHashStore.reset(random.nextLong());
        chunkedHashStore.addAll(keys.iterator());
    }
    n = chunkedHashStore.size();
    defRetValue = -1; // For the very few cases in which we can decide

    // Degenerate case: no keys at all, so neither function is built.
    if (n == 0) {
        rankMean = escape = width = 0;
        firstFunction = secondFunction = null;
        remap = null;
        if (!givenChunkedHashStore)
            chunkedHashStore.close();
        return;
    }

    // Compute distribution of values and maximum number of bits.
    // NOTE(review): with defaultReturnValue(-1), the first occurrence of a value
    // stores 0, so counts appear to hold frequency-1 — confirm this is intended.
    int w = 0, size;
    long v;
    final Long2LongOpenHashMap counts = new Long2LongOpenHashMap();
    counts.defaultReturnValue(-1);
    for (LongIterator i = values.iterator(); i.hasNext();) {
        v = i.nextLong();
        counts.put(v, counts.get(v) + 1);
        size = Fast.length(v);
        if (size > w)
            w = size;
    }

    this.width = w;
    final int m = counts.size();

    LOGGER.debug("Generating two-steps GOV3 function with " + w + " output bits...");

    // Sort keys by reverse frequency
    final long[] keysArray = counts.keySet().toLongArray(new long[m]);
    LongArrays.quickSort(keysArray, 0, keysArray.length, new AbstractLongComparator() {
        private static final long serialVersionUID = 1L;

        public int compare(final long a, final long b) {
            return Long.signum(counts.get(b) - counts.get(a));
        }
    });

    // Mean rank of a key's value in the frequency-sorted order, used for statistics.
    long mean = 0;
    for (int i = 0; i < keysArray.length; i++)
        mean += i * counts.get(keysArray[i]);
    rankMean = (double) mean / n;

    // Analyze data and choose a threshold
    long post = n, bestCost = Long.MAX_VALUE;
    int pos = 0, best = -1;

    // Examine every possible choice for r. Note that r = 0 implies one function, so we do not need to test the case r == w.
    for (int r = 0; r < w && pos < m; r++) {

        /* This cost function is dependent on the implementation of GOV3Function. 
         * Note that for r = 0 we are actually computing the cost of a single function (the first one). */
        final long cost = (long) Math.min(GOV3Function.C * n * 1.126 + n * r, GOV3Function.C * n * r)
                + (long) Math.min(GOV3Function.C * post * 1.126 + post * w, GOV3Function.C * post * w)
                + pos * Long.SIZE;

        if (cost < bestCost) {
            best = r;
            bestCost = cost;
        }

        /* We add to pre and subtract from post the counts of keys from position (1<<r)-1 to position (1<<r+1)-1. */
        for (int j = 0; j < (1 << r) && pos < m; j++) {
            final long c = counts.get(keysArray[pos++]);
            post -= c;
        }
    }

    if (ASSERTS)
        assert pos == m;

    // The distribution is no longer needed; release its memory eagerly.
    counts.clear();
    counts.trim();

    // We must keep the remap array small.
    if (best >= Integer.SIZE)
        best = Integer.SIZE - 1;

    LOGGER.debug("Best threshold: " + best);
    // The (1 << best) - 1 most frequent values are remapped to small codes;
    // the code `escape` marks "not among the frequent values".
    escape = (1 << best) - 1;
    System.arraycopy(keysArray, 0, remap = new long[escape], 0, remap.length);
    final Long2LongOpenHashMap map = new Long2LongOpenHashMap();
    map.defaultReturnValue(-1);
    for (int i = 0; i < escape; i++)
        map.put(remap[i], i);

    // First function: maps each key either to the small code of its (frequent)
    // value, or to `escape` when the value is infrequent.
    if (best != 0) {
        firstFunction = new GOV3Function.Builder<T>().keys(keys).transform(transform).store(chunkedHashStore)
                .values(new AbstractLongBigList() {
                    public long getLong(long index) {
                        long value = map.get(values.getLong(index));
                        return value == -1 ? escape : value;
                    }

                    public long size64() {
                        return n;
                    }
                }, best).indirect().build();

        LOGGER.debug("Actual bit cost per key of first function: " + (double) firstFunction.numBits() / n);
    } else
        firstFunction = null;

    // Keep in the store only the keys the first function could not resolve
    // (or all keys, if there is no first function).
    chunkedHashStore.filter(new Predicate() {
        public boolean evaluate(Object triple) {
            return firstFunction == null || firstFunction.getLongByTriple((long[]) triple) == escape;
        }
    });

    // Second function: full-width values for the remaining (escaped) keys.
    secondFunction = new GOV3Function.Builder<T>().store(chunkedHashStore).values(values, w).indirect().build();

    this.seed = chunkedHashStore.seed();
    if (!givenChunkedHashStore)
        chunkedHashStore.close();

    LOGGER.debug("Actual bit cost per key of second function: " + (double) secondFunction.numBits() / n);

    LOGGER.info("Actual bit cost per key: " + (double) numBits() / n);
    LOGGER.info("Completed.");

}

From source file:flex2.compiler.mxml.rep.MxmlDocument.java

/**
 * Returns an iterator over the root's visual child initializers that haven't
 * been marked described (and are not state-specific). If the root is not a
 * MovieClip, an empty iterator is returned.
 */
// TODO visual children are marked described by the descriptor
//      generator, so there is some order-of-codegen sensitivity
//      here. It's the only such dependency, but at some point
//      descriptor codegen and marking-of-isDescribed should be
//      split apart.
public final Iterator getProceduralVisualChildInitializerIterator() {
    if (root instanceof MovieClip) {
        return new FilterIterator(((MovieClip) root).getChildInitializerIterator(), new Predicate() {
            public boolean evaluate(Object object) {
                ValueInitializer init = (ValueInitializer) object;
                Object value = init.getValue();
                // Keep non-Model values; keep Model values only while they are
                // neither described nor state-specific.
                return !(value instanceof Model)
                        || (!((Model) value).isDescribed() && !((Model) value).isStateSpecific());
            }
        });
    } else {
        // Typed empty list instead of the raw Collections.EMPTY_LIST, avoiding
        // an unchecked raw-type usage while returning the same empty iterator.
        return Collections.emptyList().iterator();
    }
}

From source file:it.unimi.dsi.sux4j.mph.TwoStepsMWHCFunction.java

/** Creates a new two-step function for the given keys and values.
 *
 * @param keys the keys in the domain of the function.
 * @param transform a transformation strategy for the keys.
 * @param values values to be assigned to each key, in the same order of the iterator returned by <code>keys</code>; if {@code null}, the
 * assigned value will be the ordinal number of each key.
 * @param tempDir a temporary directory for the store files, or {@code null} for the standard temporary directory.
 * @param chunkedHashStore a chunked hash store containing the keys associated with their rank, or {@code null}; the store
 * can be unchecked, but in this case <code>keys</code> and <code>transform</code> must be non-{@code null}. 
 */
protected TwoStepsMWHCFunction(final Iterable<? extends T> keys,
        final TransformationStrategy<? super T> transform, final LongBigList values, final File tempDir,
        ChunkedHashStore<T> chunkedHashStore) throws IOException {
    // NOTE(review): tempDir is never used in this constructor — confirm whether it
    // should be forwarded to the ChunkedHashStore created below.
    this.transform = transform;
    final ProgressLogger pl = new ProgressLogger(LOGGER);
    pl.displayLocalSpeed = true;
    pl.displayFreeMemory = true;
    final RandomGenerator random = new XorShift1024StarRandomGenerator();
    pl.itemsName = "keys";

    // Build a chunked hash store from the keys when the caller did not provide one.
    final boolean givenChunkedHashStore = chunkedHashStore != null;
    if (!givenChunkedHashStore) {
        if (keys == null)
            throw new IllegalArgumentException(
                    "If you do not provide a chunked hash store, you must provide the keys");
        chunkedHashStore = new ChunkedHashStore<T>(transform, pl);
        chunkedHashStore.reset(random.nextLong());
        chunkedHashStore.addAll(keys.iterator());
    }
    n = chunkedHashStore.size();
    defRetValue = -1; // For the very few cases in which we can decide

    // Degenerate case: no keys at all, so neither function is built.
    if (n == 0) {
        rankMean = escape = width = 0;
        firstFunction = secondFunction = null;
        remap = null;
        if (!givenChunkedHashStore)
            chunkedHashStore.close();
        return;
    }

    // Compute distribution of values and maximum number of bits.
    // NOTE(review): with defaultReturnValue(-1), the first occurrence of a value
    // stores 0, so counts appear to hold frequency-1 — confirm this is intended.
    int w = 0, size;
    long v;
    final Long2LongOpenHashMap counts = new Long2LongOpenHashMap();
    counts.defaultReturnValue(-1);
    for (LongIterator i = values.iterator(); i.hasNext();) {
        v = i.nextLong();
        counts.put(v, counts.get(v) + 1);
        size = Fast.length(v);
        if (size > w)
            w = size;
    }

    this.width = w;
    final int m = counts.size();

    LOGGER.debug("Generating two-steps MWHC function with " + w + " output bits...");

    // Sort keys by reverse frequency
    final long[] keysArray = counts.keySet().toLongArray(new long[m]);
    LongArrays.quickSort(keysArray, 0, keysArray.length, new AbstractLongComparator() {
        private static final long serialVersionUID = 1L;

        public int compare(final long a, final long b) {
            return Long.signum(counts.get(b) - counts.get(a));
        }
    });

    // Mean rank of a key's value in the frequency-sorted order, used for statistics.
    long mean = 0;
    for (int i = 0; i < keysArray.length; i++)
        mean += i * counts.get(keysArray[i]);
    rankMean = (double) mean / n;

    // Analyze data and choose a threshold
    long post = n, bestCost = Long.MAX_VALUE;
    int pos = 0, best = -1;

    // Examine every possible choice for r. Note that r = 0 implies one function, so we do not need to test the case r == w.
    for (int r = 0; r < w && pos < m; r++) {

        /* This cost function is dependent on the implementation of MWHCFunction. 
         * Note that for r = 0 we are actually computing the cost of a single function (the first one). */
        final long cost = (long) Math.min(HypergraphSorter.GAMMA * n * 1.126 + n * r,
                HypergraphSorter.GAMMA * n * r)
                + (long) Math.min(HypergraphSorter.GAMMA * post * 1.126 + post * w,
                        HypergraphSorter.GAMMA * post * w)
                + pos * Long.SIZE;

        if (cost < bestCost) {
            best = r;
            bestCost = cost;
        }

        /* We add to pre and subtract from post the counts of keys from position (1<<r)-1 to position (1<<r+1)-1. */
        for (int j = 0; j < (1 << r) && pos < m; j++) {
            final long c = counts.get(keysArray[pos++]);
            post -= c;
        }
    }

    if (ASSERTS)
        assert pos == m;

    // The distribution is no longer needed; release its memory eagerly.
    counts.clear();
    counts.trim();

    // We must keep the remap array small.
    if (best >= Integer.SIZE)
        best = Integer.SIZE - 1;

    LOGGER.debug("Best threshold: " + best);
    // The (1 << best) - 1 most frequent values are remapped to small codes;
    // the code `escape` marks "not among the frequent values".
    escape = (1 << best) - 1;
    System.arraycopy(keysArray, 0, remap = new long[escape], 0, remap.length);
    final Long2LongOpenHashMap map = new Long2LongOpenHashMap();
    map.defaultReturnValue(-1);
    for (int i = 0; i < escape; i++)
        map.put(remap[i], i);

    // First function: maps each key either to the small code of its (frequent)
    // value, or to `escape` when the value is infrequent.
    if (best != 0) {
        firstFunction = new MWHCFunction.Builder<T>().keys(keys).transform(transform).store(chunkedHashStore)
                .values(new AbstractLongBigList() {
                    public long getLong(long index) {
                        long value = map.get(values.getLong(index));
                        return value == -1 ? escape : value;
                    }

                    public long size64() {
                        return n;
                    }
                }, best).indirect().build();

        LOGGER.debug("Actual bit cost per key of first function: " + (double) firstFunction.numBits() / n);
    } else
        firstFunction = null;

    // Keep in the store only the keys the first function could not resolve
    // (or all keys, if there is no first function).
    chunkedHashStore.filter(new Predicate() {
        public boolean evaluate(Object triple) {
            return firstFunction == null || firstFunction.getLongByTriple((long[]) triple) == escape;
        }
    });

    // Second function: full-width values for the remaining (escaped) keys.
    secondFunction = new MWHCFunction.Builder<T>().store(chunkedHashStore).values(values, w).indirect().build();

    this.seed = chunkedHashStore.seed();
    if (!givenChunkedHashStore)
        chunkedHashStore.close();

    LOGGER.debug("Actual bit cost per key of second function: " + (double) secondFunction.numBits() / n);

    LOGGER.info("Actual bit cost per key: " + (double) numBits() / n);
    LOGGER.info("Completed.");

}

From source file:net.sourceforge.fenixedu.domain.accounting.report.GratuityReportQueueJob.java

/**
 * Returns all gratuity report queue jobs of the given execution year that
 * have already finished (i.e. whose <code>done</code> flag is set).
 */
public static List<GratuityReportQueueJob> retrieveAllGeneratedReports(final ExecutionYear executionYear) {
    final List<GratuityReportQueueJob> reports = new ArrayList<GratuityReportQueueJob>();

    // Mirror CollectionUtils.select semantics: a null input collection
    // simply yields an empty result.
    if (executionYear.getGratuityReportQueueJobsSet() != null) {
        for (final Object element : executionYear.getGratuityReportQueueJobsSet()) {
            final GratuityReportQueueJob queueJob = (GratuityReportQueueJob) element;
            if (queueJob.getDone()) {
                reports.add(queueJob);
            }
        }
    }

    return reports;
}

From source file:edu.kit.dama.staging.services.impl.download.DownloadInformationServiceLocal.java

/**
 * Schedules a download for the given digital object.
 *
 * Workflow: validate the object id, collect the caller group's default
 * download staging processors, resolve the file tree to download (the
 * provided selection, or the object's full data organization), then either
 * reset and reuse an existing download entity bound to the same access point
 * or create a new one, and finally hand the entity to a
 * DownloadPreparationHandler for transfer preparation.
 *
 * @param pDigitalObjectId the object to download; must not be null.
 * @param pFileTree optional selection of the object's content; if null, the
 *        entire object is downloaded.
 * @param pProperties transfer client properties as a string map.
 * @param pSecurityContext the caller's authorization context (MEMBER role required).
 * @return the scheduled download entity, reloaded after preparation.
 * @throws TransferPreparationException if no data organization content is
 *         found for the object or transfer preparation fails.
 */
@Override
@SecuredMethod(roleRequired = Role.MEMBER)
public DownloadInformation scheduleDownload(DigitalObjectId pDigitalObjectId, IFileTree pFileTree,
        Map<String, String> pProperties, IAuthorizationContext pSecurityContext)
        throws TransferPreparationException {
    LOGGER.info("Executing scheduleDownload({}, {})", new Object[] { pDigitalObjectId, pProperties });

    if (null == pDigitalObjectId) {
        throw new IllegalArgumentException("Argument 'pDigitalObjectId' must not be null");
    }
    LOGGER.debug("Checking for digital object with id '{}'", pDigitalObjectId);
    checkObject(pDigitalObjectId, pSecurityContext);

    // Collect the group's default processors that apply to downloads and are enabled.
    LOGGER.debug("Searching for default staging processors for group '{}'", pSecurityContext.getGroupId());
    List<StagingProcessor> processors = StagingConfigurationPersistence.getSingleton()
            .findStagingProcessorsForGroup(pSecurityContext.getGroupId().getStringRepresentation());
    LOGGER.debug("Checking {} staging processor(s)", processors.size());
    List<StagingProcessor> defaultProcessors = new ArrayList<>();
    for (StagingProcessor processor : processors) {
        if (processor.isDefaultOn() && processor.isDownloadProcessingSupported() && !processor.isDisabled()) {
            LOGGER.debug(" - Adding default download staging processor " + processor.getUniqueIdentifier());
            defaultProcessors.add(processor);
        }
    }

    TransferClientProperties props = TransferClientPropertiesUtils.mapToProperties(pProperties);

    final IFileTree tmpTree;
    if (pFileTree != null) {
        //download the provided selection only
        tmpTree = pFileTree;
    } else {
        //download entire object
        try {
            tmpTree = DataOrganizationServiceLocal.getSingleton().loadFileTree(pDigitalObjectId,
                    pSecurityContext);
        } catch (EntityNotFoundException enfe) {
            throw new TransferPreparationException("Unable to prepare download for digital object '"
                    + pDigitalObjectId + "'. No data organization content found.", enfe);
        }
    }

    LOGGER.debug("Converting temporary tree to persistable version");
    IFileTree treeToDownload = DataOrganizationUtils.copyTree(tmpTree);
    final String accessPointId = props.getStagingAccessPointId();
    DownloadInformation existingEntity = null;
    // Look for an existing download of this object bound to the same access
    // point, so it can be reused instead of creating a duplicate entity.
    if (accessPointId != null) {
        List<DownloadInformation> existingEntities = persistenceImpl
                .getEntitiesByDigitalObjectId(pDigitalObjectId, pSecurityContext);
        LOGGER.debug("Searching for download associated with access point {}", accessPointId);
        existingEntity = (DownloadInformation) CollectionUtils.find(existingEntities, new Predicate() {

            @Override
            public boolean evaluate(Object o) {
                return accessPointId.equals(((DownloadInformation) o).getAccessPointId());
            }
        });
    } else {
        LOGGER.warn("AccessPointId is null, skipping search for download with access point.");
    }

    DownloadInformation activeDownload;

    if (existingEntity != null) {
        LOGGER.debug("Existing download found. Removing local data.");
        StagingService.getSingleton().flushDownload(existingEntity.getId(), pSecurityContext);
        LOGGER.debug("Setting download to SCHEDULED");
        //reset status and error message
        existingEntity.setStatusEnum(DOWNLOAD_STATUS.SCHEDULED);
        existingEntity.setErrorMessage(null);
        existingEntity.setClientAccessUrl(null);
        //reset expire timer
        existingEntity.setExpiresAt(System.currentTimeMillis() + DownloadInformation.DEFAULT_LIFETIME);
        existingEntity.setAccessPointId(props.getStagingAccessPointId());
        LOGGER.debug("Merging processors of transfer properties with processors of existing entity.");
        Collection<StagingProcessor> mergedProcessors = mergeStagingProcessors(props.getProcessors(),
                existingEntity.getStagingProcessors());
        LOGGER.debug("Merging in default processors.");
        mergedProcessors = mergeStagingProcessors(mergedProcessors, defaultProcessors);

        existingEntity.clearStagingProcessors();
        for (final StagingProcessor proc : mergedProcessors) {
            LOGGER.debug("Adding staging processor with id {} to download.", proc.getId());
            existingEntity.addStagingProcessor(proc);
        }

        //merge the entity with the database
        activeDownload = persistenceImpl.mergeEntity(existingEntity, pSecurityContext);
    } else {
        LOGGER.debug("No entity found for ID '{}'.", pDigitalObjectId);
        LOGGER.debug("Merging processors of transfer properties with default processors.");
        Collection<StagingProcessor> merged = mergeStagingProcessors(props.getProcessors(), defaultProcessors);
        LOGGER.debug("Creating new download entity. ");
        //no entity for pDigitalObjectId found...create a new one
        activeDownload = persistenceImpl.createEntity(pDigitalObjectId, props.getStagingAccessPointId(), merged,
                pSecurityContext);
    }

    LOGGER.debug("Creating DownloadPreparationHandler");
    DownloadPreparationHandler handler = new DownloadPreparationHandler(persistenceImpl, activeDownload,
            treeToDownload);
    LOGGER.debug("Scheduling download");
    handler.prepareTransfer(TransferClientPropertiesUtils.mapToProperties(pProperties), pSecurityContext);
    LOGGER.debug("Download scheduling finished. Obtaining updated entity.");
    activeDownload = persistenceImpl.getEntityById(activeDownload.getId(), pSecurityContext);
    LOGGER.debug("Returning download entity.");
    return activeDownload;
}

From source file:net.sourceforge.fenixedu.domain.candidacyProcess.IndividualCandidacy.java

/**
 * Returns the formations of this candidacy whose <code>concluded</code>
 * flag is set.
 */
public List<Formation> getConcludedFormationList() {
    final List<Formation> concluded = new ArrayList<Formation>();
    for (final Object element : getFormationsSet()) {
        final Formation formation = (Formation) element;
        if (formation.getConcluded()) {
            concluded.add(formation);
        }
    }
    return concluded;
}

From source file:net.shopxx.entity.Order.java

/**
 * Returns true if at least one order item of this order is flagged for
 * delivery.
 */
@Transient
public boolean getIsDelivery() {
    // Mirror CollectionUtils.exists semantics: a null collection yields false.
    if (getOrderItems() == null) {
        return false;
    }
    for (final Object object : getOrderItems()) {
        final OrderItem orderItem = (OrderItem) object;
        if (orderItem != null && BooleanUtils.isTrue(orderItem.getIsDelivery())) {
            return true;
        }
    }
    return false;
}

From source file:module.siadap.domain.wrappers.UnitSiadapWrapper.java

/**
 * @param universe the SIADAP universe to consider.
 * @param belongsToInstitutionalQuota whether to consider only persons
 *        belonging (or not) to the institutional quota.
 * @return a single-entry map from the number of persons considered (those
 *         matching the quota and not skipped — including ones whose grade did
 *         not require validation) to the sorted list of persons that should be
 *         validated, i.e. that have been assigned relevant or above, or
 *         inadequate evaluations by the evaluator, from this unit and those
 *         below (using the
 *         {@link SiadapYearConfiguration#getHarmonizationUnitRelations()})
 */
protected Map<Integer, Collection<PersonSiadapWrapper>> getValidationPersonSiadapWrappers(
        final SiadapUniverse universe, final boolean belongsToInstitutionalQuota) {

    List<PersonSiadapWrapper> listPeopleToUse = new ArrayList<PersonSiadapWrapper>();

    // Single-element array so the anonymous Predicate can mutate the count.
    final int[] counter = new int[1];
    getUnitAttachedPersons(getUnit(), listPeopleToUse, true, new Predicate() {

        @Override
        public boolean evaluate(Object arg0) {
            PersonSiadapWrapper wrapper = (PersonSiadapWrapper) arg0;
            if (wrapper.isQuotaAware() != belongsToInstitutionalQuota) {
                return false;
            }
            if (wrapper.isWithSkippedEval(universe)) {
                return false;
            }
            // Counted even when the grade below excludes the person from the list.
            counter[0]++;
            SiadapGlobalEvaluation totalQualitativeEvaluationScoring = wrapper
                    .getTotalQualitativeEvaluationScoringObject(universe);
            // Only grades needing validation (relevant/above or inadequate) pass.
            switch (totalQualitativeEvaluationScoring) {
            case EXCELLENCY:
            case HIGH:
            case LOW:
            case ZERO:
                return true;
            case MEDIUM:
            case NONEXISTING:
            case WITHSKIPPEDEVAL:
                return false;
            }
            return false;
        }
    }, Collections.singleton(getConfiguration().getHarmonizationUnitRelations()),
            universe.getHarmonizationRelation(getConfiguration()));
    HashMap<Integer, Collection<PersonSiadapWrapper>> hashMap = new HashMap<Integer, Collection<PersonSiadapWrapper>>();
    Collections.sort(listPeopleToUse,
            PersonSiadapWrapper.PERSON_COMPARATOR_BY_NAME_FALLBACK_YEAR_THEN_PERSON_OID);
    // Integer.valueOf avoids the deprecated new Integer(int) boxing constructor.
    hashMap.put(Integer.valueOf(counter[0]), listPeopleToUse);
    return hashMap;

}