Example usage for com.google.common.collect.Sets.newHashSetWithExpectedSize

Introduction

On this page you can find example usages of com.google.common.collect.Sets.newHashSetWithExpectedSize.

Prototype

public static <E> HashSet<E> newHashSetWithExpectedSize(int expectedSize) 

Documentation

Creates a HashSet instance, with a high enough initial table size that it should hold expectedSize elements without resizing.
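
A minimal sketch of typical usage; the word list and class name are illustrative rather than taken from the sources below:

import com.google.common.collect.Sets;

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class ExpectedSizeDemo {
    public static void main(String[] args) {
        List<String> words = Arrays.asList("alpha", "beta", "gamma");

        // Pre-size the set so that adding words.size() distinct elements
        // should not force the backing table to resize.
        HashSet<String> unique = Sets.newHashSetWithExpectedSize(words.size());
        unique.addAll(words);

        System.out.println(unique); // e.g. [gamma, alpha, beta]
    }
}

Unlike new HashSet<>(n), which treats n as a raw initial table capacity, this factory derives the capacity from the expected element count, taking the default load factor into account, so the expected number of elements can be passed directly.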

Usage

From source file:com.android.build.gradle.internal.pipeline.IntermediateFolderUtils.java
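
Here the filtered scope set is pre-sized to requiredScopes.size(), an upper bound, since limitedScopes only ever receives scopes that are also in the required set.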

private static void parseScopeLevelFolders(@NonNull File rootFolder, @NonNull Set<ContentType> types,
        @NonNull Set<Scope> requiredScopes, @NonNull InputGenerator generator) {
    File[] files = rootFolder.listFiles(File::isDirectory);

    if (files != null && files.length > 0) {
        for (File file : files) {
            Set<Scope> scopes = stringToScopes(file.getName());
            if (scopes != null) {
                // we need at most the requiredScopes, but no more.
                // content that only contains unwanted Scopes can safely be dropped; however,
                // content that is both in and out of Scope will trigger a runtime error.
                // check that these are the scopes we want, and only pass down scopes we care about.
                Set<Scope> limitedScopes = Sets.newHashSetWithExpectedSize(requiredScopes.size());
                boolean foundUnwanted = false;
                for (Scope scope : scopes) {
                    if (requiredScopes.contains(scope)) {
                        limitedScopes.add(scope);
                    } else {
                        foundUnwanted = true;
                    }
                }
                if (!limitedScopes.isEmpty()) {
                    if (foundUnwanted) {
                        throw new RuntimeException("error");
                    }
                    parseContentLevelFolders(file, types, Sets.immutableEnumSet(limitedScopes), generator);
                }
            }
        }
    }
}

From source file:com.opengamma.financial.analytics.model.credit.isdanew.ISDACompliantYieldCurveFunction.java
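
In the getRequirements override below, the requirements set is created with an expected size of exactly 2, matching the two ValueRequirement entries added immediately afterwards.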

@Override
public CompiledFunctionDefinition compile(final FunctionCompilationContext compilationContext,
        final Instant atInstant) {
    final ZonedDateTime atZDT = ZonedDateTime.ofInstant(atInstant, ZoneOffset.UTC);
    final ConfigSource configSource = OpenGammaCompilationContext.getConfigSource(compilationContext);
    final InterpolatedYieldCurveSpecificationBuilder curveSpecBuilder = new ConfigDBInterpolatedYieldCurveSpecificationBuilder(
            configSource);
    return new AbstractInvokingCompiledFunction(atZDT.with(LocalTime.MIDNIGHT),
            atZDT.plusDays(1).with(LocalTime.MIDNIGHT).minusNanos(1000000)) {

        @SuppressWarnings("synthetic-access")
        @Override
        public Set<ComputedValue> execute(final FunctionExecutionContext executionContext,
                final FunctionInputs inputs, final ComputationTarget target,
                final Set<ValueRequirement> desiredValues) throws AsynchronousExecution {
            final ZonedDateTime valuationDate = ZonedDateTime.now(executionContext.getValuationClock());
            final HistoricalTimeSeriesBundle timeSeries = (HistoricalTimeSeriesBundle) inputs
                    .getValue(ValueRequirementNames.YIELD_CURVE_INSTRUMENT_CONVERSION_HISTORICAL_TIME_SERIES);
            final ValueRequirement desiredValue = desiredValues.iterator().next();
            final String curveName = desiredValue.getConstraint(ValuePropertyNames.CURVE);
            final String spotDateString = desiredValue.getConstraint(ISDAFunctionConstants.ISDA_CURVE_DATE);
            final LocalDate spotDate = LocalDate.parse(spotDateString, dateFormatter);
            final String offsetString = desiredValue.getConstraint(ISDAFunctionConstants.ISDA_CURVE_OFFSET);
            final int offset = Integer.parseInt(offsetString); //TODO: Is this still used???
            final Object definitionObject = inputs.getValue(ValueRequirementNames.TARGET);
            if (definitionObject == null) {
                throw new OpenGammaRuntimeException(
                        "Couldn't get interpolated yield curve specification: " + curveName);
            }
            final YieldCurveDefinition curveDefinition = (YieldCurveDefinition) definitionObject;
            final Object dataObject = inputs.getValue(ValueRequirementNames.YIELD_CURVE_MARKET_DATA);
            if (dataObject == null) {
                throw new OpenGammaRuntimeException("Couldn't get yield curve data for " + curveName);
            }
            final SnapshotDataBundle marketData = (SnapshotDataBundle) dataObject;
            final InterpolatedYieldCurveSpecification specification = getCurveSpecification(curveDefinition,
                    spotDate);
            final InterpolatedYieldCurveSpecificationWithSecurities specificationWithSecurities = getCurveWithSecurities(
                    specification, executionContext, marketData);
            final ISDAInstrumentTypes[] instruments = new ISDAInstrumentTypes[specificationWithSecurities
                    .getStrips().size()];
            final Period[] tenors = new Period[specificationWithSecurities.getStrips().size()];
            final double[] values = new double[specificationWithSecurities.getStrips().size()];

            Period swapIvl = null;
            int i = 0;
            for (final FixedIncomeStripWithSecurity strip : specificationWithSecurities.getStrips()) {
                final String securityType = strip.getSecurity().getSecurityType();
                if (!(securityType.equals(CashSecurity.SECURITY_TYPE)
                        || securityType.equals(SwapSecurity.SECURITY_TYPE))) {
                    throw new OpenGammaRuntimeException("ISDA curves should only use Libor and swap rates");
                }
                final Double rate = marketData.getDataPoint(strip.getSecurityIdentifier());
                if (rate == null) {
                    throw new OpenGammaRuntimeException("Could not get rate for " + strip);
                }
                if (CashSecurity.SECURITY_TYPE.equals(strip.getSecurity().getSecurityType())) {
                    instruments[i] = ISDAInstrumentTypes.MoneyMarket;
                } else if (SwapSecurity.SECURITY_TYPE.equals(strip.getSecurity().getSecurityType())) {
                    instruments[i] = ISDAInstrumentTypes.Swap;
                    swapIvl = getFixedLegPaymentTenor((SwapSecurity) strip.getSecurity());
                } else {
                    throw new OpenGammaRuntimeException(
                            "Unexpected curve instument type, can only handle cash and swaps, got: "
                                    + strip.getSecurity());
                }
                tenors[i] = strip.getTenor().getPeriod();
                values[i] = rate;
                i++;
            }

            final ISDACompliantYieldCurve yieldCurve = ISDACompliantYieldCurveBuild.build(
                    valuationDate.toLocalDate(), spotDate, instruments, tenors, values, MONEY_MARKET_DCC,
                    SWAP_DCC, swapIvl, CURVE_DCC, badDayConv);

            final ValueProperties properties = createValueProperties().with(ValuePropertyNames.CURVE, curveName)
                    .with(ISDAFunctionConstants.ISDA_CURVE_OFFSET, offsetString)
                    .with(ISDAFunctionConstants.ISDA_CURVE_DATE, spotDateString)
                    .with(ISDAFunctionConstants.ISDA_IMPLEMENTATION,
                            ISDAFunctionConstants.ISDA_IMPLEMENTATION_NEW)
                    .with(ValuePropertyNames.CURVE_CALCULATION_METHOD, ISDAFunctionConstants.ISDA_METHOD_NAME)
                    .get();
            final ValueSpecification spec = new ValueSpecification(ValueRequirementNames.YIELD_CURVE,
                    target.toSpecification(), properties);
            return Collections.singleton(new ComputedValue(spec, yieldCurve));
        }

        private Period getFixedLegPaymentTenor(final SwapSecurity swap) {
            if (swap.getReceiveLeg() instanceof FixedInterestRateLeg) {
                FixedInterestRateLeg fixLeg = (FixedInterestRateLeg) swap.getReceiveLeg();
                return PeriodFrequency.convertToPeriodFrequency(fixLeg.getFrequency()).getPeriod();
            } else if (swap.getPayLeg() instanceof FixedInterestRateLeg) {
                FixedInterestRateLeg fixLeg = (FixedInterestRateLeg) swap.getPayLeg();
                return PeriodFrequency.convertToPeriodFrequency(fixLeg.getFrequency()).getPeriod();
            } else {
                throw new OpenGammaRuntimeException("Got a swap without a fixed leg " + swap);
            }

        }

        @Override
        public ComputationTargetType getTargetType() {
            return ComputationTargetType.CURRENCY;
        }

        @Override
        public boolean canApplyTo(final FunctionCompilationContext context, final ComputationTarget target) {
            return Currency.OBJECT_SCHEME.equals(target.getUniqueId().getScheme());
        }

        @Override
        public Set<ValueSpecification> getResults(final FunctionCompilationContext context,
                final ComputationTarget target) {
            @SuppressWarnings("synthetic-access")
            final ValueProperties properties = createValueProperties().withAny(ValuePropertyNames.CURVE)
                    .withAny(ISDAFunctionConstants.ISDA_CURVE_OFFSET)
                    .withAny(ISDAFunctionConstants.ISDA_CURVE_DATE)
                    .with(ISDAFunctionConstants.ISDA_IMPLEMENTATION,
                            ISDAFunctionConstants.ISDA_IMPLEMENTATION_NEW)
                    .with(ValuePropertyNames.CURVE_CALCULATION_METHOD, ISDAFunctionConstants.ISDA_METHOD_NAME)
                    .get();
            return Collections.singleton(new ValueSpecification(ValueRequirementNames.YIELD_CURVE,
                    target.toSpecification(), properties));
        }

        @Override
        public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context,
                final ComputationTarget target, final ValueRequirement desiredValue) {
            final ValueProperties constraints = desiredValue.getConstraints();
            final Set<String> curveNames = constraints.getValues(ValuePropertyNames.CURVE);
            if (curveNames == null || curveNames.size() != 1) {
                return null;
            }
            final String curveName = Iterables.getOnlyElement(curveNames);
            final ValueProperties properties = ValueProperties.builder()
                    .with(ValuePropertyNames.CURVE, curveName).get();

            // look up yield curve specification - don't rely on YieldCurveSpecificationFunction as that may have been compiled before the yield curve was created
            // this is a slight performance hit over the standard curve spec handling but shouldn't be an issue
            //TODO: should use versionOf rather than latest but we don't have access to the valuation clock here
            final YieldCurveDefinition curveDefinition = configSource
                    .getLatestByName(YieldCurveDefinition.class, curveName);
            if (curveDefinition == null) {
                return null;
            }

            final Set<ValueRequirement> requirements = Sets.newHashSetWithExpectedSize(2);
            final ComputationTargetSpecification targetSpec = target.toSpecification();
            requirements.add(new ValueRequirement(ValueRequirementNames.YIELD_CURVE_MARKET_DATA, targetSpec,
                    properties));
            requirements.add(new ValueRequirement(ValueRequirementNames.TARGET,
                    ComputationTargetType.of(YieldCurveDefinition.class), curveDefinition.getUniqueId()));
            return requirements;
        }

        private InterpolatedYieldCurveSpecificationWithSecurities getCurveWithSecurities(
                final InterpolatedYieldCurveSpecification curveSpec,
                final FunctionExecutionContext executionContext, final SnapshotDataBundle marketData) {
            //TODO: Move this to a separate function
            final FixedIncomeStripIdentifierAndMaturityBuilder builder = new FixedIncomeStripIdentifierAndMaturityBuilder(
                    OpenGammaExecutionContext.getRegionSource(executionContext),
                    OpenGammaExecutionContext.getConventionBundleSource(executionContext),
                    executionContext.getSecuritySource(),
                    OpenGammaExecutionContext.getHolidaySource(executionContext));
            final InterpolatedYieldCurveSpecificationWithSecurities curveSpecificationWithSecurities = builder
                    .resolveToSecurity(curveSpec, marketData);
            return curveSpecificationWithSecurities;
        }

        private InterpolatedYieldCurveSpecification getCurveSpecification(
                final YieldCurveDefinition curveDefinition, final LocalDate curveDate) {
            final InterpolatedYieldCurveSpecification curveSpec = curveSpecBuilder.buildCurve(curveDate,
                    curveDefinition);
            return curveSpec;
        }

    };
}

From source file:edu.buaa.satla.analysis.core.arg.ARGState.java
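
When transferring the coverage relation, the replacement's mCoveredByThis set is lazily initialized with an expected size equal to the number of states covered by the element being replaced.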

/**
 * This method does basically the same as removeFromARG for this element, but
 * before destroying it, it will copy all relationships to other elements to
 * a new state. I.e., the replacement element will receive all parents and
 * children of this element, and it will also cover all elements which are
 * currently covered by this element.
 *
 * @param replacement
 */
public void replaceInARGWith(ARGState replacement) {
    assert !destroyed : "Don't use destroyed ARGState " + this;
    assert !replacement.destroyed : "Don't use destroyed ARGState " + replacement;
    assert !isCovered() : "Not implemented: Replacement of covered element " + this;
    assert !replacement.isCovered() : "Cannot replace with covered element " + replacement;

    // copy children
    for (ARGState child : children) {
        assert (child.parents.contains(this)) : "Inconsistent ARG at " + this;
        child.parents.remove(this);
        child.addParent(replacement);
    }
    children.clear();

    for (ARGState parent : parents) {
        assert (parent.children.contains(this)) : "Inconsistent ARG at " + this;
        parent.children.remove(this);
        replacement.addParent(parent);
    }
    parents.clear();

    if (mCoveredByThis != null) {
        if (replacement.mCoveredByThis == null) {
            // lazy initialization because rarely needed
            replacement.mCoveredByThis = Sets.newHashSetWithExpectedSize(mCoveredByThis.size());
        }

        for (ARGState covered : mCoveredByThis) {
            assert covered.mCoveredBy == this : "Inconsistent coverage relation at " + this;
            covered.mCoveredBy = replacement;
            replacement.mCoveredByThis.add(covered);
        }

        mCoveredByThis.clear();
        mCoveredByThis = null;
    }

    destroyed = true;
}

From source file:com.palantir.atlasdb.keyvalue.cassandra.CQLKeyValueService.java
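
The cell set is sized to the full cross product of rows and selected columns, which is exactly the number of Cell objects the nested loops below create.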

@Override
public Map<Cell, Value> getRows(final String tableName, final Iterable<byte[]> rows, ColumnSelection selection,
        final long startTs) {
    if (!selection.allColumnsSelected()) {
        Collection<byte[]> selectedColumns = selection.getSelectedColumns();
        Set<Cell> cells = Sets.newHashSetWithExpectedSize(selectedColumns.size() * Iterables.size(rows));
        for (byte[] row : rows) {
            for (byte[] col : selectedColumns) {
                cells.add(Cell.create(row, col));
            }
        }
        try {
            StartTsResultsCollector collector = new StartTsResultsCollector(startTs);
            loadWithTs(tableName, cells, startTs, false, collector, readConsistency);
            return collector.collectedResults;
        } catch (Throwable t) {
            throw Throwables.throwUncheckedException(t);
        }
    }

    try {
        return getRowsAllColsInternal(tableName, rows, startTs);
    } catch (Throwable t) {
        throw Throwables.throwUncheckedException(t);
    }
}

From source file:com.android.builder.internal.compiler.PreProcessCache.java
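
The key set is sized to the combined size of the two maps, an upper bound on the size of their merged key set.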

protected synchronized void saveItems(@NonNull File itemStorage) throws IOException {
    // write "compact" blob
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    factory.setNamespaceAware(true);
    factory.setValidating(false);
    factory.setIgnoringComments(true);
    DocumentBuilder builder;

    try {
        builder = factory.newDocumentBuilder();
        Document document = builder.newDocument();

        Node rootNode = document.createElement(NODE_ITEMS);
        document.appendChild(rootNode);

        // Set the version
        Attr attr = document.createAttribute(ATTR_VERSION);
        attr.setValue(XML_VERSION);
        rootNode.getAttributes().setNamedItem(attr);

        Set<T> keys = Sets.newHashSetWithExpectedSize(mMap.size() + mStoredItems.size());
        keys.addAll(mMap.keySet());
        keys.addAll(mStoredItems.keySet());

        for (T key : keys) {
            Item item = mMap.get(key);

            if (item != null) {

                Node itemNode = createItemNode(document, key, item);
                if (itemNode != null) {
                    rootNode.appendChild(itemNode);
                }

            } else {
                StoredItem storedItem = mStoredItems.get(key);
                // check that the source file still exists in order to avoid
                // storing libraries that are gone.
                if (storedItem != null && storedItem.getSourceFile().isFile()
                        && storedItem.areOutputFilesPresent()) {
                    Node itemNode = createItemNode(document, key, storedItem);
                    if (itemNode != null) {
                        rootNode.appendChild(itemNode);
                    }
                }
            }
        }

        String content = XmlPrettyPrinter.prettyPrint(document, true);

        itemStorage.getParentFile().mkdirs();
        Files.write(content, itemStorage, Charsets.UTF_8);
    } catch (ParserConfigurationException e) {
    }
}

From source file:org.prebake.fs.FileVersioner.java
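
keyPaths is pre-sized to the number of prerequisites, an upper bound, because paths that fail to normalize under the watched root are dropped.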

/**
 * Checks that the files that an artifact depends on are still valid before
 * marking that artifact valid and recording the prerequisites so that the
 * artifact can be invalidated should any of the prerequisites subsequently
 * change.
 *
 * @param as the address space for item.
 * @param artifact a newly valid non file artifact that was derived from
 *    prerequisites.
 * @param value passed to {@link NonFileArtifact#validate} if the artifact
 *    can indeed be validated.
 * @param prerequisites the files on which item depends.
 * @param prereqHash the hash of prerequisites at the time artifact was
 *    derived from them.
 * @return true if item is really valid -- if its hash is up-to-date.
 *    If false, then one or more of the prerequisites has changed since
 *    artifact was derived.
 */
public <X, T extends NonFileArtifact<X>> boolean updateArtifact(ArtifactAddresser<T> as, T artifact,
        @Nullable X value, Collection<Path> prerequisites, Hash prereqHash) {
    Set<Path> keyPaths = Sets.newHashSetWithExpectedSize(prerequisites.size());
    for (Path p : prerequisites) {
        // Normalize the path, failing if it is not under the root of watched files.
        Path keyPath = toKeyPath(p);
        if (keyPath != null) {
            keyPaths.add(keyPath);
        }
    }

    Iterator<Path> it = keyPaths.iterator();
    if (!it.hasNext()) { // No dependencies.
        if (NO_FILE_HASH.equals(prereqHash)) {
            artifact.validate(value);
            return true;
        } else {
            return false;
        }
    }

    int index = indexForAddresser(as); // assumes addressers long lived
    assert addressers.get(index) == as;
    String address = index + ":" + as.addressFor(artifact);

    // Lock this for read so we can rehash and store the validity without
    // fearing that the file hash store will change in the meantime and fail to
    // invalidate the artifact.
    derivativeHashLock.readLock().lock();
    try {
        Hash.Builder rehash = Hash.builder();
        getHashes(prerequisites, rehash);
        if (!prereqHash.equals(rehash.build())) {
            logger.log(Level.INFO, "Version skew.  Cannot validate {0}", address);
            return false;
        }
        rehash = null;

        ArtifactUpdateLoop loop = makeArtifactUpdateLoop();
        loop.start(address);
        try {
            do {
                loop.put(it.next());
            } while (it.hasNext());
        } finally {
            loop.end();
        }
        artifact.validate(value);
        logger.log(Level.FINE, "Validated {0}", address);
        return true;
    } finally {
        derivativeHashLock.readLock().unlock();
    }
}

From source file:com.google.gerrit.server.notedb.ChangeRebuilderImpl.java
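
psIds is sized to the number of patch sets in the bundle, an upper bound, since patch sets numbered above the current one are skipped.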

private void buildUpdates(NoteDbUpdateManager manager, ChangeBundle bundle) throws IOException, OrmException {
    manager.setCheckExpectedState(false);
    Change change = new Change(bundle.getChange());
    PatchSet.Id currPsId = change.currentPatchSetId();
    // We will rebuild all events, except for draft comments, in buckets based
    // on author and timestamp.
    List<Event> events = new ArrayList<>();
    Multimap<Account.Id, PatchLineCommentEvent> draftCommentEvents = ArrayListMultimap.create();

    events.addAll(getHashtagsEvents(change, manager));

    // Delete ref only after hashtags have been read
    deleteChangeMetaRef(change, manager.getChangeRepo().cmds);
    deleteDraftRefs(change, manager.getAllUsersRepo());

    Integer minPsNum = getMinPatchSetNum(bundle);
    Set<PatchSet.Id> psIds = Sets.newHashSetWithExpectedSize(bundle.getPatchSets().size());

    for (PatchSet ps : bundle.getPatchSets()) {
        if (ps.getId().get() > currPsId.get()) {
            log.info("Skipping patch set {}, which is higher than current patch set {}", ps.getId(), currPsId);
            continue;
        }
        psIds.add(ps.getId());
        events.add(new PatchSetEvent(change, ps, manager.getChangeRepo().rw));
        for (PatchLineComment c : getPatchLineComments(bundle, ps)) {
            PatchLineCommentEvent e = new PatchLineCommentEvent(c, change, ps, patchListCache);
            if (c.getStatus() == Status.PUBLISHED) {
                events.add(e);
            } else {
                draftCommentEvents.put(c.getAuthor(), e);
            }
        }
    }

    for (PatchSetApproval psa : bundle.getPatchSetApprovals()) {
        if (psIds.contains(psa.getPatchSetId())) {
            events.add(new ApprovalEvent(psa, change.getCreatedOn()));
        }
    }

    for (Table.Cell<ReviewerStateInternal, Account.Id, Timestamp> r : bundle.getReviewers().asTable()
            .cellSet()) {
        events.add(new ReviewerEvent(r, change.getCreatedOn()));
    }

    Change noteDbChange = new Change(null, null, null, null, null);
    for (ChangeMessage msg : bundle.getChangeMessages()) {
        if (msg.getPatchSetId() == null || psIds.contains(msg.getPatchSetId())) {
            events.add(new ChangeMessageEvent(msg, noteDbChange, change.getCreatedOn()));
        }
    }

    sortAndFillEvents(change, noteDbChange, events, minPsNum);

    EventList<Event> el = new EventList<>();
    for (Event e : events) {
        if (!el.canAdd(e)) {
            flushEventsToUpdate(manager, el, change);
            checkState(el.canAdd(e));
        }
        el.add(e);
    }
    flushEventsToUpdate(manager, el, change);

    EventList<PatchLineCommentEvent> plcel = new EventList<>();
    for (Account.Id author : draftCommentEvents.keys()) {
        for (PatchLineCommentEvent e : EVENT_ORDER.sortedCopy(draftCommentEvents.get(author))) {
            if (!plcel.canAdd(e)) {
                flushEventsToDraftUpdate(manager, plcel, change);
                checkState(plcel.canAdd(e));
            }
            plcel.add(e);
        }
        flushEventsToDraftUpdate(manager, plcel, change);
    }
}

From source file:org.sosy_lab.cpachecker.cpa.arg.counterexamples.CEXExporter.java
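
The edge set is sized to the number of states in the path, one more than the number of consecutive state pairs it will actually store.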

private static Set<Pair<ARGState, ARGState>> getEdgesOfPath(ARGPath pPath) {
    Set<Pair<ARGState, ARGState>> result = Sets.newHashSetWithExpectedSize(pPath.size());
    Iterator<ARGState> it = pPath.asStatesList().iterator();
    assert it.hasNext();
    ARGState lastElement = it.next();
    while (it.hasNext()) {
        ARGState currentElement = it.next();
        result.add(Pair.of(lastElement, currentElement));
        lastElement = currentElement;
    }
    return result;
}

From source file:com.android.tools.idea.res.AppResourceRepository.java
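
getAllIds first sums the declared id counts across all libraries, then pre-sizes myIds to that total before the per-library id sets are merged in.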

@NotNull
private Set<String> getAllIds() {
    long currentModCount = getModificationCount();
    if (myIdsModificationCount < currentModCount) {
        myIdsModificationCount = currentModCount;
        if (myIds == null) {
            int size = 0;
            for (FileResourceRepository library : myLibraries) {
                if (library.getAllDeclaredIds() != null) {
                    size += library.getAllDeclaredIds().size();
                }
            }
            myIds = Sets.newHashSetWithExpectedSize(size);
        } else {
            myIds.clear();
        }
        for (FileResourceRepository library : myLibraries) {
            if (library.getAllDeclaredIds() != null) {
                myIds.addAll(library.getAllDeclaredIds());
            }
        }
        // Also add all ids from resource types, just in case it contains things that are not in the libraries.
        myIds.addAll(super.getItemsOfType(ResourceType.ID));
    }
    return myIds;
}

From source file:org.lilyproject.linkindex.LinkIndex.java
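
The result set is sized to match the absolute referrer set, since each AbsoluteRecordId contributes at most one RecordId.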

public Set<RecordId> getReferrers(AbsoluteRecordId record, SchemaId vtag, SchemaId sourceField)
        throws LinkIndexException, InterruptedException {
    Set<AbsoluteRecordId> absoluteReferrers = getAbsoluteReferrers(record, vtag, sourceField);
    Set<RecordId> referrers = Sets.newHashSetWithExpectedSize(absoluteReferrers.size());
    for (AbsoluteRecordId absoluteReferrer : absoluteReferrers) {
        referrers.add(absoluteReferrer.getRecordId());
    }
    return referrers;
}