Example usage for org.apache.commons.collections Transformer transform

Introduction

This page collects example usages of org.apache.commons.collections.Transformer.transform(Object), drawn from open-source projects.

Prototype

Object transform(Object input);
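
Transformer is a single-method interface from Commons Collections 3.x; it is not generic, so casts are required. A minimal sketch of implementing and calling it, using an illustrative toUpperCase transformer that is not part of the examples below:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.Transformer;

public class TransformerBasics {

    public static void main(String[] args) {
        // A Transformer maps one input object to one output object.
        Transformer toUpperCase = new Transformer() {
            public Object transform(Object input) {
                return ((String) input).toUpperCase();
            }
        };

        // Direct call on a single value.
        String shout = (String) toUpperCase.transform("hello");
        System.out.println(shout); // HELLO

        // Typical collection use: transform every element in place.
        List words = new ArrayList(Arrays.asList("alpha", "beta"));
        CollectionUtils.transform(words, toUpperCase);
        System.out.println(words); // [ALPHA, BETA]
    }
}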

Usage

From source file:org.examproject.tweet.service.SimplePermalinkService.java

/**
 * get the tweet dto list by date.
 */
@Override
public List<TweetDto> getTweetListByDate(String userName, Integer year, Integer month, Integer day) {
    LOG.debug("called.");
    try {
        // create a date conditions.
        Transformer beginDateTransformer = new DayBeginDateTransformer();
        Transformer endDateTransformer = new DayEndDateTransformer();
        DateValue dateValue = new DateValue(year, month, day);
        Date begin = (Date) beginDateTransformer.transform(dateValue);
        Date end = (Date) endDateTransformer.transform(dateValue);

        // get the tweet list.
        List<Tweet> tweetList = tweetRepository.findByNameAndDateBetween(userName, begin, end);
        LOG.debug("permalink tweet count: " + tweetList.size());

        // map the object.
        List<TweetDto> tweetDtoList = new ArrayList<TweetDto>();
        for (Tweet tweet : tweetList) {
            TweetDto tweetDto = context.getBean(TweetDto.class);
            // map the entity-object to the dto-object.
            mapper.map(tweet, tweetDto);
            tweetDtoList.add(tweetDto);
        }

        return tweetDtoList;

    } catch (Exception e) {
        LOG.error("an error occurred: " + e.getMessage());
        throw new RuntimeException(e);
    }
}
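
DayBeginDateTransformer, DayEndDateTransformer and DateValue are project-specific classes that the excerpt does not show. A hypothetical sketch of the begin-of-day transformer, assuming DateValue exposes getYear(), getMonth() and getDay():

import java.util.Calendar;

import org.apache.commons.collections.Transformer;

// Hypothetical reconstruction: maps a DateValue to 00:00:00.000 of that day.
public class DayBeginDateTransformer implements Transformer {

    public Object transform(Object input) {
        DateValue value = (DateValue) input;   // assumed accessors, see note above
        Calendar calendar = Calendar.getInstance();
        calendar.clear();
        // Calendar months are zero-based, hence the -1.
        calendar.set(value.getYear(), value.getMonth() - 1, value.getDay(), 0, 0, 0);
        return calendar.getTime();             // java.util.Date at the start of the day
    }
}

DayEndDateTransformer would be the analogous transformer returning the last instant of the same day.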

From source file:org.examproject.tweet.service.SimpleTagcrowdService.java

/**
 * update the entity.
 */
@Override
public List<TagcrowdDto> update(TweetDto tweetDto) {
    LOG.debug("called.");
    try {
        Long statusId = Long.parseLong(tweetDto.getStatusId());
        String content = tweetDto.getText();
        String userName = tweetDto.getUserName();

        // get the korean word only.
        Predicate predicate = new IsContainKrHangulCodePredicate();
        boolean isNeed = predicate.evaluate(content);
        if (isNeed) {

            // split words from the sentence.
            Transformer transformer = new SentenceToWordsTransformer();
            String[] words = (String[]) transformer.transform(content);

            // process all words.
            for (int i = 0; i < words.length; i++) {
                String oneWord = words[i];
                boolean isKr = predicate.evaluate(oneWord);
                if (isKr) {

                    // get the vocab entity.
                    Vocab vocab = context.getBean(Vocab.class);

                    // get the word id.
                    List<Word> wordList = wordRepository.findByText(oneWord);

                    // if the new word.
                    if (wordList.isEmpty()) {

                        // create this word.
                        Word wordEntity = context.getBean(Word.class);
                        wordEntity.setText(oneWord);
                        Word newWordEntity = (Word) wordRepository.save(wordEntity);
                        vocab.setWord(newWordEntity);
                    }
                    // already exist.
                    else {
                        Word wordEntity = wordList.get(0);
                        vocab.setWord(wordEntity);
                    }

                    // set vocabulary this tweet!
                    Tweet tweet = tweetRepository.findById(statusId);
                    vocab.setStatus(tweet);
                    vocab.setName(userName);
                    vocabRepository.save(vocab);
                }
            }
        }

        // TODO: return tagcrowd dto list..
        List<TagcrowdDto> tagcrowdDtoList = null;
        return tagcrowdDtoList;
    } catch (Exception e) {
        LOG.error("an error occurred: " + e.getMessage());
        throw new RuntimeException(e);
    }
}
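
SentenceToWordsTransformer is likewise project-specific. A rough sketch under the assumption that it simply splits the tweet text on whitespace (the real class may also strip punctuation):

import org.apache.commons.collections.Transformer;

// Hypothetical sketch: splits a sentence into an array of words.
public class SentenceToWordsTransformer implements Transformer {

    public Object transform(Object input) {
        String sentence = (String) input;
        return sentence.trim().split("\\s+");   // returned as String[], as the caller expects
    }
}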

From source file:org.mule.endpoint.URIBuilder.java

private OrderedQueryParameters appendAddress(StringBuilder buffer, Transformer tokenProcessor,
        Transformer tokenEncoder) {
    if (null != address) {
        int index = address.indexOf(QUERY);
        if (index > -1) {
            buffer.append(tokenProcessor.transform(address.substring(0, index)));
            return parseQueries((String) tokenProcessor.transform(address.substring(index + 1)));
        } else {
            buffer.append(tokenProcessor.transform(address));
            return new OrderedQueryParameters();
        }
    } else {
        constructAddress(buffer, tokenProcessor, tokenEncoder);
        return new OrderedQueryParameters();
    }
}

From source file:org.mule.endpoint.URIBuilder.java

private void constructAddress(StringBuilder buffer, Transformer tokenProcessor, Transformer tokenEncoder) {
    buffer.append(protocol);
    buffer.append(DOTS_SLASHES);
    boolean atStart = true;
    if (null != user) {
        buffer.append(tokenEncoder.transform(tokenProcessor.transform(user)));
        if (null != password) {
            buffer.append(":");
            buffer.append(tokenEncoder.transform(tokenProcessor.transform(password)));
        }
        buffer.append("@");
        atStart = false;
    }
    if (null != host) {
        buffer.append(tokenProcessor.transform(host));
        if (null != port) {
            buffer.append(":");
            buffer.append(tokenProcessor.transform(port));
        }
        atStart = false;
    }
    if (null != path) {
        if (!atStart && !path.startsWith("/")) {
            buffer.append("/");
        }
        buffer.append(tokenProcessor.transform(path));
    }
}
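
The nested calls tokenEncoder.transform(tokenProcessor.transform(...)) can also be written with TransformerUtils.chainedTransformer, which applies the transformers left to right. A small sketch with illustrative stand-ins for the two transformers:

import org.apache.commons.collections.Transformer;
import org.apache.commons.collections.TransformerUtils;

public class ChainedTokenTransformers {

    public static void main(String[] args) {
        Transformer tokenProcessor = new Transformer() {
            public Object transform(Object input) {
                return ((String) input).trim();              // stand-in for token processing
            }
        };
        Transformer tokenEncoder = new Transformer() {
            public Object transform(Object input) {
                return ((String) input).replace("@", "%40"); // stand-in for URL encoding
            }
        };

        // Applies tokenProcessor first, then tokenEncoder.
        Transformer processThenEncode = TransformerUtils.chainedTransformer(tokenProcessor, tokenEncoder);
        System.out.println(processThenEncode.transform("  user@host ")); // user%40host
    }
}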

From source file:org.objectstyle.cayenne.exp.Expression.java

/**
 * Creates a transformed copy of this expression, applying 
 * transformation provided by Transformer to all its nodes.
 * Null transformer will result in an identical deep copy of
 * this expression.
 * 
 * <p>To force a node and its children to be pruned from the 
 * copy, Transformer should return null for a given node.
 * 
 * <p>There is one limitation on what Transformer is expected to do: 
 * if a node is an Expression it must be transformed to null
 * or another Expression. Any other object type would result in an 
 * exception. 
 * 
 *
 * @since 1.1
 */
public Expression transform(Transformer transformer) {

    Expression copy = shallowCopy();
    int count = getOperandCount();
    for (int i = 0, j = 0; i < count; i++) {
        Object operand = getOperand(i);
        Object transformedChild = operand;

        if (operand instanceof Expression) {
            transformedChild = ((Expression) operand).transform(transformer);
        } else if (transformer != null) {
            transformedChild = transformer.transform(operand);
        }

        if (transformedChild != null) {
            Object value = (transformedChild != nullValue) ? transformedChild : null;
            copy.setOperand(j, value);
            j++;
        } else if (pruneNodeForPrunedChild(operand)) {
            // bail out early...
            return null;
        }
    }

    // all the children are processed, only now transform this copy 
    return (transformer != null) ? (Expression) transformer.transform(copy) : copy;
}
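
A caller passes any Transformer to Expression.transform; per the contract above, non-Expression operands may be mapped to arbitrary objects, while Expression nodes must map to an Expression or to null. A hedged usage sketch, assuming the ExpressionFactory from the same Cayenne package:

import org.apache.commons.collections.Transformer;
import org.objectstyle.cayenne.exp.Expression;
import org.objectstyle.cayenne.exp.ExpressionFactory;

public class ExpressionTransformExample {

    public static void main(String[] args) {
        Expression qualifier = ExpressionFactory.matchExp("artistName", "picasso");

        // Replaces one literal operand; everything else (including Expression nodes) is returned unchanged.
        Transformer replaceValue = new Transformer() {
            public Object transform(Object input) {
                return "picasso".equals(input) ? "matisse" : input;
            }
        };

        Expression renamed = qualifier.transform(replaceValue);
        System.out.println(renamed); // the same match expression, now comparing against "matisse"
    }
}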

From source file:org.opencms.cache.CmsVfsMemoryObjectCache.java

/**
 * Uses a transformer for loading an object from a path if it has not already been cached, and then caches it.<p>
 *
 * @param cms the CMS context 
 * @param rootPath the root path from which the object should be loaded 
 * @param function the function which should load the object from VFS if it isn't already cached 
 * 
 * @return the loaded object 
 */
public Object loadVfsObject(CmsObject cms, String rootPath, Transformer function) {

    Object result = getCachedObject(cms, rootPath);
    if (result == null) {
        result = function.transform(rootPath);
        putCachedObject(cms, rootPath, result);
    }
    return result;
}
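
Commons Collections ships the same load-on-miss idiom as a map decorator: LazyMap.decorate wraps a map so that a Transformer is invoked as a factory whenever get() misses. A stand-alone sketch (the path string and loader body are illustrative, not OpenCms API):

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.collections.Transformer;
import org.apache.commons.collections.map.LazyMap;

public class LazyMapExample {

    public static void main(String[] args) {
        Transformer loader = new Transformer() {
            public Object transform(Object input) {
                // Stand-in for "load the object from the VFS path".
                return "content of " + input;
            }
        };

        Map cache = LazyMap.decorate(new HashMap(), loader);

        // The first get() misses, runs the transformer and stores the result; later calls reuse it.
        System.out.println(cache.get("/sites/default/index.html"));
    }
}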

From source file:org.openvpms.archetype.rules.product.ProductPriceUpdater.java

/**
 * Collects product prices from a list of product-supplier relationships, updated by the specified transformer.
 *
 * @param relationships the product-supplier relationships
 * @param transformer   returns the updated product prices associated with each relationship
 * @param save          if {@code true}, save updated prices
 * @return a list of updated prices
 * @throws ArchetypeServiceException for any archetype service error
 */
private List<ProductPrice> collect(List<EntityRelationship> relationships, Transformer transformer,
        boolean save) {
    List<ProductPrice> result = null;
    for (EntityRelationship relationship : relationships) {
        List<ProductPrice> prices = (List<ProductPrice>) transformer.transform(relationship);
        if (!prices.isEmpty()) {
            if (result == null) {
                result = prices;
            } else {
                result.addAll(prices);
            }
        }
    }
    if (result == null) {
        result = Collections.emptyList();
    } else if (save) {
        service.save(result);
    }
    return result;
}

From source file:org.sipfoundry.sipxconfig.search.SearchManagerImpl.java

private List hits2beans(TopDocs docs, Transformer transformer, IndexSearcher searcher, int firstItem,
        int pageSize) throws IOException {
    final int hitCount = docs.scoreDocs.length;
    List results = new ArrayList(hitCount);
    // if (transformer != null) {
    // results = ListUtils.predicatedList(results, NotNullPredicate.INSTANCE);
    // results = ListUtils.transformedList(results, transformer);
    // }

    int from = firstItem < 0 ? 0 : firstItem;
    int to = pageSize < 0 ? hitCount : Math.min(hitCount, firstItem + pageSize);
    for (int i = from; i < to; i++) {
        Document document = searcher.doc(docs.scoreDocs[i].doc);
        Identity identity = m_beanAdaptor.getBeanIdentity(document);
        if (identity == null) {
            continue;
        }
        if (transformer != null) {
            Object bean = transformer.transform(identity);
            if (bean != null) {
                results.add(bean);
            }
        } else {
            results.add(identity);
        }
    }

    return results;
}
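
The commented-out lines refer to the commons-collections list decorators; for reference, ListUtils.transformedList applies the Transformer to elements as they are added (already-present elements are not converted). An illustrative sketch:

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.collections.ListUtils;
import org.apache.commons.collections.Transformer;

public class TransformedListExample {

    public static void main(String[] args) {
        Transformer toLength = new Transformer() {
            public Object transform(Object input) {
                return Integer.valueOf(((String) input).length());
            }
        };

        // Each add() stores the transformed value instead of the original string.
        List lengths = ListUtils.transformedList(new ArrayList(), toLength);
        lengths.add("identity-1");
        lengths.add("id-2");
        System.out.println(lengths); // [10, 4]
    }
}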

From source file:ubic.gemma.analysis.expression.diff.LinearModelAnalyzer.java

/**
 * @param bioAssaySet source data
 * @param config
 * @param dmatrix data
 * @param samplesUsed analyzed
 * @param factors included in the model
 * @param subsetFactorValue null unless analyzing a subset (only used for book-keeping)
 * @return analysis, or null if there was a problem.
 */
private DifferentialExpressionAnalysis doAnalysis(BioAssaySet bioAssaySet,
        DifferentialExpressionAnalysisConfig config, ExpressionDataDoubleMatrix dmatrix,
        List<BioMaterial> samplesUsed, List<ExperimentalFactor> factors, FactorValue subsetFactorValue) {

    if (factors.isEmpty()) {
        log.error("Must provide at least one factor");
        return null;
    }

    if (samplesUsed.size() <= factors.size()) {
        log.error("Must have more samples than factors");
        return null;
    }

    final Map<String, Collection<ExperimentalFactor>> label2Factors = getRNames(factors);

    Map<ExperimentalFactor, FactorValue> baselineConditions = ExperimentalDesignUtils
            .getBaselineConditions(samplesUsed, factors);

    QuantitationType quantitationType = dmatrix.getQuantitationTypes().iterator().next();

    ExperimentalFactor interceptFactor = determineInterceptFactor(factors, quantitationType);

    /*
     * Build our factor terms, with interactions handled specially
     */
    List<String[]> interactionFactorLists = new ArrayList<String[]>();
    ObjectMatrix<String, String, Object> designMatrix = ExperimentalDesignUtils.buildDesignMatrix(factors,
            samplesUsed, baselineConditions);

    setupFactors(designMatrix, baselineConditions);

    String modelFormula = "";
    boolean oneSampleTtest = interceptFactor != null && factors.size() == 1;
    if (oneSampleTtest) {
        modelFormula = " ";
    } else {
        modelFormula = buildModelFormula(config, label2Factors, interceptFactor, interactionFactorLists);
    }

    dmatrix = filterAndLogTransform(quantitationType, dmatrix);
    DoubleMatrix<CompositeSequence, BioMaterial> namedMatrix = dmatrix.getMatrix();

    if (log.isDebugEnabled())
        outputForDebugging(dmatrix, designMatrix);

    /*
     * PREPARATION FOR 'NATIVE' FITTING
     */
    DoubleMatrix<String, String> sNamedMatrix = makeDataMatrix(designMatrix, namedMatrix);
    DesignMatrix properDesignMatrix = makeDesignMatrix(designMatrix, interactionFactorLists,
            baselineConditions);

    /*
     * Run the analysis NOTE this can be simplified if we strip out R code.
     */
    final Map<String, LinearModelSummary> rawResults = runAnalysis(namedMatrix, sNamedMatrix, label2Factors,
            modelFormula, properDesignMatrix, interceptFactor, interactionFactorLists, baselineConditions,
            quantitationType);

    if (rawResults.size() == 0) {
        log.error("Got no results from the analysis");
        return null;
    }

    /*
     * Initialize data structures we need to hold results
     */

    // this used to be a Set, but a List is much faster.
    Map<String, List<DifferentialExpressionAnalysisResult>> resultLists = new HashMap<String, List<DifferentialExpressionAnalysisResult>>();
    Map<String, List<Double>> pvaluesForQvalue = new HashMap<String, List<Double>>();
    for (String factorName : label2Factors.keySet()) {
        if (properDesignMatrix.getDroppedFactors().contains(factorName)) {
            continue;
        }
        resultLists.put(factorName, new ArrayList<DifferentialExpressionAnalysisResult>());
        pvaluesForQvalue.put(factorName, new ArrayList<Double>());
    }
    addinteraction: for (String[] fs : interactionFactorLists) {
        for (String f : fs) {
            if (properDesignMatrix.getDroppedFactors().contains(f)) {
                continue addinteraction;
            }
        }
        String intF = StringUtils.join(fs, ":");
        resultLists.put(intF, new ArrayList<DifferentialExpressionAnalysisResult>());
        pvaluesForQvalue.put(intF, new ArrayList<Double>());
    }

    if (pvaluesForQvalue.isEmpty()) {
        log.warn(
                "No results were obtained for the current stage of analysis, possibly due to dropped factors.");
        return null;
    }

    /*
     * Create result objects for each model fit. Keeping things in order is important.
     */
    final Transformer rowNameExtractor = TransformerUtils.invokerTransformer("getId");
    boolean warned = false;
    int notUsable = 0;
    int processed = 0;
    for (CompositeSequence el : namedMatrix.getRowNames()) {

        if (++processed % 15000 == 0) {
            log.info("Processed results for " + processed + " elements ...");
        }

        LinearModelSummary lm = rawResults.get(rowNameExtractor.transform(el).toString());

        if (log.isDebugEnabled())
            log.debug(el.getName() + "\n" + lm);

        if (lm == null) {
            if (!warned) {
                log.warn("No result for " + el + ", further warnings suppressed");
                warned = true;
            }
            notUsable++;
            continue;
        }

        for (String factorName : label2Factors.keySet()) {

            if (!pvaluesForQvalue.containsKey(factorName)) {
                // was dropped.
                continue;
            }

            Double overallPValue = null;
            DifferentialExpressionAnalysisResult probeAnalysisResult = DifferentialExpressionAnalysisResult.Factory
                    .newInstance();
            probeAnalysisResult.setProbe(el);

            if (lm.getCoefficients() == null) {
                // probeAnalysisResult.setPvalue( null );
                // pvaluesForQvalue.get( factorName ).add( overallPValue );
                // resultLists.get( factorName ).add( probeAnalysisResult );
                notUsable++;
                continue;
            }

            Collection<ExperimentalFactor> factorsForName = label2Factors.get(factorName);

            if (factorsForName.size() > 1) {
                /*
                 * Interactions
                 */
                if (factorsForName.size() > 2) {
                    log.error("Handling more than two-way interactions is not implemented");
                    return null;
                }

                assert factorName.contains(":");
                String[] factorNames = StringUtils.split(factorName, ":");
                assert factorNames.length == factorsForName.size();
                overallPValue = lm.getInteractionEffectP(factorNames);

                if (overallPValue != null && !Double.isNaN(overallPValue)) {

                    Map<String, Double> interactionContrastTStats = lm.getContrastTStats(factorName);
                    Map<String, Double> interactionContrastCoeffs = lm.getContrastCoefficients(factorName);
                    Map<String, Double> interactionContrastPValues = lm.getContrastPValues(factorName);

                    for (String term : interactionContrastPValues.keySet()) {
                        Double contrastPvalue = interactionContrastPValues.get(term);

                        makeContrast(probeAnalysisResult, factorsForName, term, factorName, contrastPvalue,
                                interactionContrastTStats, interactionContrastCoeffs);

                    }
                } else {
                    if (!warned) {
                        log.warn("Interaction could not be computed for " + el
                                + ", further warnings suppressed");
                        warned = true;
                    }

                    if (log.isDebugEnabled())
                        log.debug("Interaction could not be computed for " + el
                                + ", further warnings suppressed");

                    notUsable++; // will over count?
                    continue;
                }

            } else {

                /*
                 * Main effect
                 */

                assert factorsForName.size() == 1;
                ExperimentalFactor experimentalFactor = factorsForName.iterator().next();

                if (interceptFactor != null && factorsForName.size() == 1
                        && experimentalFactor.equals(interceptFactor)) {
                    overallPValue = lm.getInterceptP();
                } else {
                    overallPValue = lm.getMainEffectP(factorName);
                }

                /*
                 * Add contrasts unless overallpvalue is NaN
                 */
                if (overallPValue != null && !Double.isNaN(overallPValue)) {

                    Map<String, Double> mainEffectContrastTStats = lm.getContrastTStats(factorName);
                    Map<String, Double> mainEffectContrastPvalues = lm.getContrastPValues(factorName);
                    Map<String, Double> mainEffectContrastCoeffs = lm.getContrastCoefficients(factorName);

                    for (String term : mainEffectContrastPvalues.keySet()) {

                        Double contrastPvalue = mainEffectContrastPvalues.get(term);

                        makeContrast(probeAnalysisResult, factorsForName, term, factorName, contrastPvalue,
                                mainEffectContrastTStats, mainEffectContrastCoeffs);

                    }
                } else {
                    if (!warned) {
                        log.warn("ANOVA could not be done for " + experimentalFactor + " on " + el
                                + ", further warnings suppressed");
                        warned = true;
                    }

                    if (log.isDebugEnabled())
                        log.debug("ANOVA could not be done for " + experimentalFactor + " on " + el);

                    notUsable++; // will over count?
                    continue;
                }
            }

            assert !Double.isNaN(overallPValue) : "We should not be keeping non-number pvalues (null or NaNs)";

            probeAnalysisResult.setPvalue(nan2Null(overallPValue));
            pvaluesForQvalue.get(factorName).add(overallPValue);
            resultLists.get(factorName).add(probeAnalysisResult);
        } // over terms

    } // over probes

    if (notUsable > 0) {
        log.info(notUsable + " elements or results were not usable - model could not be fit, etc.");
    }

    getRanksAndQvalues(resultLists, pvaluesForQvalue);

    DifferentialExpressionAnalysis expressionAnalysis = makeAnalysisEntity(bioAssaySet, config, label2Factors,
            baselineConditions, interceptFactor, interactionFactorLists, oneSampleTtest, resultLists,
            subsetFactorValue);

    log.info("Analysis processing phase done ...");

    return expressionAnalysis;
}
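
The commons-collections piece here is TransformerUtils.invokerTransformer("getId"), which reflectively calls the named no-argument method on whatever object it is given. A stand-alone sketch with a Probe class standing in for CompositeSequence:

import org.apache.commons.collections.Transformer;
import org.apache.commons.collections.TransformerUtils;

public class InvokerTransformerExample {

    // Stand-in for CompositeSequence: any public bean with a public no-argument getId().
    public static class Probe {
        private final Long id;

        public Probe(Long id) {
            this.id = id;
        }

        public Long getId() {
            return id;
        }
    }

    public static void main(String[] args) {
        Transformer rowNameExtractor = TransformerUtils.invokerTransformer("getId");
        Object id = rowNameExtractor.transform(new Probe(42L));
        System.out.println(id); // 42
    }
}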

From source file:ubic.gemma.core.analysis.expression.diff.LinearModelAnalyzer.java

/**
 * @param bioAssaySet       source data, could be a SubSet
 * @param dmatrix           data (for the subset, if it's a subset)
 * @param samplesUsed       analyzed
 * @param factors           included in the model
 * @param subsetFactorValue null unless analyzing a subset (only used for book-keeping)
 * @return analysis, or null if there was a problem.
 */
private DifferentialExpressionAnalysis doAnalysis(BioAssaySet bioAssaySet,
        DifferentialExpressionAnalysisConfig config, ExpressionDataDoubleMatrix dmatrix,
        List<BioMaterial> samplesUsed, List<ExperimentalFactor> factors, FactorValue subsetFactorValue) {

    // We may want to change this to fall back to running normally, though the real fix is to just finish the ebayes implementation.
    if (config.getModerateStatistics() && dmatrix.hasMissingValues()) {
        throw new UnsupportedOperationException(
                "Ebayes cannot be used when there are values missing in the data");
    }

    if (factors.isEmpty()) {
        LinearModelAnalyzer.log.error("Must provide at least one factor");
        return null;
    }

    if (samplesUsed.size() <= factors.size()) {
        LinearModelAnalyzer.log.error("Must have more samples than factors");
        return null;
    }

    final Map<String, Collection<ExperimentalFactor>> label2Factors = this.getRNames(factors);

    Map<ExperimentalFactor, FactorValue> baselineConditions = ExperimentalDesignUtils
            .getBaselineConditions(samplesUsed, factors);

    this.dropIncompleteFactors(samplesUsed, factors, baselineConditions);

    if (factors.isEmpty()) {
        LinearModelAnalyzer.log
                .error("Must provide at least one factor; they were all removed due to incomplete values");
        return null;
    }

    QuantitationType quantitationType = dmatrix.getQuantitationTypes().iterator().next();

    ExperimentalFactor interceptFactor = this.determineInterceptFactor(factors, quantitationType);

    /*
     * Build our factor terms, with interactions handled specially
     */
    List<String[]> interactionFactorLists = new ArrayList<>();
    ObjectMatrix<String, String, Object> designMatrix = ExperimentalDesignUtils.buildDesignMatrix(factors,
            samplesUsed, baselineConditions);

    config.setBaseLineFactorValues(baselineConditions);

    boolean oneSampleTTest = interceptFactor != null && factors.size() == 1;
    if (!oneSampleTTest) {
        this.buildModelFormula(config, label2Factors, interactionFactorLists);
    }

    /*
     * FIXME: remove columns that are marked as outliers.
     */
    dmatrix = ExpressionDataDoubleMatrixUtil.filterAndLog2Transform(quantitationType, dmatrix);
    DoubleMatrix<CompositeSequence, BioMaterial> namedMatrix = dmatrix.getMatrix();

    DoubleMatrix1D librarySize = getLibrarySizes(config, dmatrix);

    if (LinearModelAnalyzer.log.isDebugEnabled())
        this.outputForDebugging(dmatrix, designMatrix);

    /*
     * PREPARATION FOR 'NATIVE' FITTING
     */
    DoubleMatrix<String, String> sNamedMatrix = LinearModelAnalyzer.makeDataMatrix(designMatrix, namedMatrix);
    DesignMatrix properDesignMatrix = this.makeDesignMatrix(designMatrix, interactionFactorLists,
            baselineConditions);

    /*
     * Run the analysis NOTE this can be simplified if we strip out R code.
     */
    final Map<String, LinearModelSummary> rawResults = this.runAnalysis(namedMatrix, sNamedMatrix,
            properDesignMatrix, librarySize, config);

    if (rawResults.size() == 0) {
        LinearModelAnalyzer.log.error("Got no results from the analysis");
        return null;
    }

    /*
     * Initialize data structures we need to hold results.
     */

    // this used to be a Set, but a List is much faster.
    Map<String, List<DifferentialExpressionAnalysisResult>> resultLists = new HashMap<>();
    Map<String, List<Double>> pvaluesForQvalue = new HashMap<>();

    for (String factorName : label2Factors.keySet()) {
        resultLists.put(factorName, new ArrayList<DifferentialExpressionAnalysisResult>());
        pvaluesForQvalue.put(factorName, new ArrayList<Double>());
    }

    for (String[] fs : interactionFactorLists) {
        String intF = StringUtils.join(fs, ":");
        resultLists.put(intF, new ArrayList<DifferentialExpressionAnalysisResult>());
        pvaluesForQvalue.put(intF, new ArrayList<Double>());
    }

    if (pvaluesForQvalue.isEmpty()) {
        LinearModelAnalyzer.log.warn("No results were obtained for the current stage of analysis.");
        return null;
    }

    /*
     * Create result objects for each model fit. Keeping things in order is important.
     */
    final Transformer rowNameExtractor = TransformerUtils.invokerTransformer("getId");
    boolean warned = false;
    int notUsable = 0;
    int processed = 0;
    for (CompositeSequence el : namedMatrix.getRowNames()) {

        if (++processed % 15000 == 0) {
            LinearModelAnalyzer.log.info("Processed results for " + processed + " elements ...");
        }

        LinearModelSummary lm = rawResults.get(rowNameExtractor.transform(el).toString());

        if (LinearModelAnalyzer.log.isDebugEnabled())
            LinearModelAnalyzer.log.debug(el.getName() + "\n" + lm);

        if (lm == null) {
            if (!warned) {
                LinearModelAnalyzer.log.warn("No result for " + el + ", further warnings suppressed");
                warned = true;
            }
            notUsable++;
            continue;
        }

        for (String factorName : label2Factors.keySet()) {

            if (!pvaluesForQvalue.containsKey(factorName)) {
                // was dropped.
                continue;
            }

            Double overallPValue;
            DifferentialExpressionAnalysisResult probeAnalysisResult = DifferentialExpressionAnalysisResult.Factory
                    .newInstance();
            probeAnalysisResult.setProbe(el);

            if (lm.getCoefficients() == null) {
                // probeAnalysisResult.setPvalue( null );
                // pvaluesForQvalue.get( factorName ).add( overallPValue );
                // resultLists.get( factorName ).add( probeAnalysisResult );
                notUsable++;
                continue;
            }

            Collection<ExperimentalFactor> factorsForName = label2Factors.get(factorName);

            if (factorsForName.size() > 1) {
                /*
                 * Interactions
                 */
                if (factorsForName.size() > 2) {
                    LinearModelAnalyzer.log.error("Handling more than two-way interactions is not implemented");
                    return null;
                }

                assert factorName.contains(":");
                String[] factorNames = StringUtils.split(factorName, ":");
                assert factorNames.length == factorsForName.size();
                overallPValue = lm.getInteractionEffectP(factorNames);

                if (overallPValue != null && !Double.isNaN(overallPValue)) {

                    Map<String, Double> interactionContrastTStats = lm.getContrastTStats(factorName);
                    Map<String, Double> interactionContrastCoeffs = lm.getContrastCoefficients(factorName);
                    Map<String, Double> interactionContrastPValues = lm.getContrastPValues(factorName);

                    for (String term : interactionContrastPValues.keySet()) {
                        Double contrastPvalue = interactionContrastPValues.get(term);

                        this.makeContrast(probeAnalysisResult, factorsForName, term, factorName, contrastPvalue,
                                interactionContrastTStats, interactionContrastCoeffs);

                    }
                } else {
                    if (!warned) {
                        LinearModelAnalyzer.log.warn("Interaction could not be computed for " + el
                                + ", further warnings suppressed");
                        warned = true;
                    }

                    if (LinearModelAnalyzer.log.isDebugEnabled())
                        LinearModelAnalyzer.log.debug("Interaction could not be computed for " + el
                                + ", further warnings suppressed");

                    notUsable++; // will over count?
                    continue;
                }

            } else {

                /*
                 * Main effect
                 */
                assert factorsForName.size() == 1;
                ExperimentalFactor experimentalFactor = factorsForName.iterator().next();

                if (factorsForName.size() == 1 && experimentalFactor.equals(interceptFactor)) {
                    overallPValue = lm.getInterceptP();
                } else {
                    overallPValue = lm.getMainEffectP(factorName);
                }

                /*
                 * Add contrasts unless overall pvalue is NaN
                 */
                if (overallPValue != null && !Double.isNaN(overallPValue)) {

                    Map<String, Double> mainEffectContrastTStats = lm.getContrastTStats(factorName);
                    Map<String, Double> mainEffectContrastPvalues = lm.getContrastPValues(factorName);
                    Map<String, Double> mainEffectContrastCoeffs = lm.getContrastCoefficients(factorName);

                    for (String term : mainEffectContrastPvalues.keySet()) {

                        Double contrastPvalue = mainEffectContrastPvalues.get(term);

                        this.makeContrast(probeAnalysisResult, factorsForName, term, factorName, contrastPvalue,
                                mainEffectContrastTStats, mainEffectContrastCoeffs);

                    }
                } else {
                    if (!warned) {
                        LinearModelAnalyzer.log.warn("ANOVA could not be done for " + experimentalFactor
                                + " on " + el + ", further warnings suppressed");
                        warned = true;
                    }

                    if (LinearModelAnalyzer.log.isDebugEnabled())
                        LinearModelAnalyzer.log
                                .debug("ANOVA could not be done for " + experimentalFactor + " on " + el);

                    notUsable++; // will over count?
                    continue;
                }
            }

            assert !Double.isNaN(overallPValue) : "We should not be keeping non-number pvalues (null or NaNs)";

            probeAnalysisResult.setPvalue(this.nan2Null(overallPValue));
            pvaluesForQvalue.get(factorName).add(overallPValue);
            resultLists.get(factorName).add(probeAnalysisResult);
        } // over terms

    } // over probes

    if (notUsable > 0) {
        LinearModelAnalyzer.log
                .info(notUsable + " elements or results were not usable - model could not be fit, etc.");
    }

    this.getRanksAndQvalues(resultLists, pvaluesForQvalue);

    DifferentialExpressionAnalysis expressionAnalysis = this.makeAnalysisEntity(bioAssaySet, config,
            label2Factors, baselineConditions, interceptFactor, interactionFactorLists, oneSampleTTest,
            resultLists, subsetFactorValue);

    LinearModelAnalyzer.log.info("Analysis processing phase done ...");

    return expressionAnalysis;
}