Example usage for java.math BigInteger compareTo

List of usage examples for java.math BigInteger compareTo

Introduction

On this page you can find example usages of java.math BigInteger compareTo.

Prototype

public int compareTo(BigInteger val) 

Document

Compares this BigInteger with the specified BigInteger.
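
The result is a negative value, zero, or a positive value when this BigInteger is numerically less than, equal to, or greater than val, so callers should test the sign of the result rather than expect a specific constant. A minimal, self-contained sketch of the idiom (class and variable names here are illustrative, not taken from the examples below):

import java.math.BigInteger;

public class BigIntegerCompareToDemo {
    public static void main(String[] args) {
        BigInteger limit = BigInteger.valueOf(1000L);
        BigInteger value = new BigInteger("123456789012345678901234567890");

        // compareTo returns <0, 0, or >0, so compare the result against 0.
        if (value.compareTo(limit) > 0) {
            System.out.println(value + " is greater than " + limit);
        } else if (value.compareTo(limit) == 0) {
            System.out.println(value + " is equal to " + limit);
        } else {
            System.out.println(value + " is less than " + limit);
        }

        // A typical range check (0 <= value <= 2^128 - 1), similar to several examples below.
        BigInteger min = BigInteger.ZERO;
        BigInteger max = BigInteger.valueOf(2).pow(128).subtract(BigInteger.ONE);
        boolean inRange = value.compareTo(min) >= 0 && value.compareTo(max) <= 0;
        System.out.println("within [0, 2^128 - 1]: " + inRange);
    }
}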

Usage

From source file: org.apache.tajo.master.querymaster.Repartitioner.java

public static void scheduleRangeShuffledFetches(TaskSchedulerContext schedulerContext, MasterPlan masterPlan,
        Stage stage, DataChannel channel, int maxNum) throws IOException {
    ExecutionBlock execBlock = stage.getBlock();
    ScanNode scan = execBlock.getScanNodes()[0];
    Path tablePath;
    tablePath = ((FileStorageManager) StorageManager.getFileStorageManager(stage.getContext().getConf()))
            .getTablePath(scan.getTableName());

    ExecutionBlock sampleChildBlock = masterPlan.getChild(stage.getId(), 0);
    SortNode sortNode = PlannerUtil.findTopNode(sampleChildBlock.getPlan(), NodeType.SORT);
    SortSpec[] sortSpecs = sortNode.getSortKeys();
    Schema sortSchema = new Schema(channel.getShuffleKeys());

    TupleRange[] ranges;
    int determinedTaskNum;

    // calculate the number of maximum query ranges
    TableStats totalStat = computeChildBlocksStats(stage.getContext(), masterPlan, stage.getId());

    // If there is an empty table in inner join, it should return zero rows.
    if (totalStat.getNumBytes() == 0 && totalStat.getColumnStats().size() == 0) {
        return;
    }
    TupleRange mergedRange = TupleUtil.columnStatToRange(sortSpecs, sortSchema, totalStat.getColumnStats(),
            false);

    if (sortNode.getSortPurpose() == SortPurpose.STORAGE_SPECIFIED) {
        StoreType storeType = PlannerUtil.getStoreType(masterPlan.getLogicalPlan());
        CatalogService catalog = stage.getContext().getQueryMasterContext().getWorkerContext().getCatalog();
        LogicalRootNode rootNode = masterPlan.getLogicalPlan().getRootBlock().getRoot();
        TableDesc tableDesc = PlannerUtil.getTableDesc(catalog, rootNode.getChild());
        if (tableDesc == null) {
            throw new IOException("Can't get table meta data from catalog: "
                    + PlannerUtil.getStoreTableName(masterPlan.getLogicalPlan()));
        }
        ranges = StorageManager.getStorageManager(stage.getContext().getConf(), storeType).getInsertSortRanges(
                stage.getContext().getQueryContext(), tableDesc, sortNode.getInSchema(), sortSpecs,
                mergedRange);
        determinedTaskNum = ranges.length;
    } else {
        RangePartitionAlgorithm partitioner = new UniformRangePartition(mergedRange, sortSpecs);
        BigInteger card = partitioner.getTotalCardinality();

        // If the range cardinality is less than the desired number of tasks,
        // we set the number of tasks to the range cardinality.
        if (card.compareTo(BigInteger.valueOf(maxNum)) < 0) {
            LOG.info(stage.getId() + ", The range cardinality (" + card
                    + ") is less then the desired number of tasks (" + maxNum + ")");
            determinedTaskNum = card.intValue();
        } else {
            determinedTaskNum = maxNum;
        }

        LOG.info(stage.getId() + ", Try to divide " + mergedRange + " into " + determinedTaskNum
                + " sub ranges (total units: " + determinedTaskNum + ")");
        ranges = partitioner.partition(determinedTaskNum);
        if (ranges == null || ranges.length == 0) {
            LOG.warn(stage.getId() + " no range infos.");
        }
        TupleUtil.setMaxRangeIfNull(sortSpecs, sortSchema, totalStat.getColumnStats(), ranges);
        if (LOG.isDebugEnabled()) {
            if (ranges != null) {
                for (TupleRange eachRange : ranges) {
                    LOG.debug(stage.getId() + " range: " + eachRange.getStart() + " ~ " + eachRange.getEnd());
                }
            }
        }
    }

    FileFragment dummyFragment = new FileFragment(scan.getTableName(), tablePath, 0, 0,
            new String[] { UNKNOWN_HOST });
    Stage.scheduleFragment(stage, dummyFragment);

    List<FetchImpl> fetches = new ArrayList<FetchImpl>();
    List<ExecutionBlock> childBlocks = masterPlan.getChilds(stage.getId());
    for (ExecutionBlock childBlock : childBlocks) {
        Stage childExecSM = stage.getContext().getStage(childBlock.getId());
        for (Task qu : childExecSM.getTasks()) {
            for (IntermediateEntry p : qu.getIntermediateData()) {
                FetchImpl fetch = new FetchImpl(p.getPullHost(), RANGE_SHUFFLE, childBlock.getId(), 0);
                fetch.addPart(p.getTaskId(), p.getAttemptId());
                fetches.add(fetch);
            }
        }
    }

    boolean ascendingFirstKey = sortSpecs[0].isAscending();
    SortedMap<TupleRange, Collection<FetchImpl>> map;
    if (ascendingFirstKey) {
        map = new TreeMap<TupleRange, Collection<FetchImpl>>();
    } else {
        map = new TreeMap<TupleRange, Collection<FetchImpl>>(new TupleRange.DescendingTupleRangeComparator());
    }

    Set<FetchImpl> fetchSet;
    try {
        RowStoreUtil.RowStoreEncoder encoder = RowStoreUtil.createEncoder(sortSchema);
        for (int i = 0; i < ranges.length; i++) {
            fetchSet = new HashSet<FetchImpl>();
            for (FetchImpl fetch : fetches) {
                String rangeParam = TupleUtil.rangeToQuery(ranges[i],
                        ascendingFirstKey ? i == (ranges.length - 1) : i == 0, encoder);
                FetchImpl copy = null;
                try {
                    copy = fetch.clone();
                } catch (CloneNotSupportedException e) {
                    throw new RuntimeException(e);
                }
                copy.setRangeParams(rangeParam);
                fetchSet.add(copy);
            }
            map.put(ranges[i], fetchSet);
        }

    } catch (UnsupportedEncodingException e) {
        LOG.error(e);
    }

    scheduleFetchesByRoundRobin(stage, map, scan.getTableName(), determinedTaskNum);

    schedulerContext.setEstimatedTaskNum(determinedTaskNum);
}

From source file: com.amazonaws.services.kinesis.clientlibrary.proxies.KinesisLocalFileProxy.java

private void populateDataRecordsFromFile(String file) throws IOException {
    try (BufferedReader in = new BufferedReader(
            new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8))) {
        Charset charset = Charset.forName("UTF-8");
        CharsetEncoder encoder = charset.newEncoder();
        String str;
        str = in.readLine();
        if (str != null) {
            ObjectMapper objectMapper = new ObjectMapper();
            SerializedShardList shards = objectMapper.readValue(str, SerializedShardList.class);
            shardList = shards.getShardList();
        }
        if (shardList == null) {
            shardList = new ArrayList<Shard>();
        }

        // Populate shardIds of shards that have an ending sequence number (and which != maxSeqNum).
        // GetRecords will return a null iterator for these after all data has been returned.
        for (Shard shard : shardList) {
            SequenceNumberRange range = shard.getSequenceNumberRange();
            if ((range != null) && (range.getEndingSequenceNumber() != null)) {
                BigInteger endingSequenceNumber = new BigInteger(range.getEndingSequenceNumber());
                if (endingSequenceNumber.compareTo(MAX_SEQUENCE_NUMBER) != 0) {
                    closedShards.add(shard.getShardId());
                }
            }
            shardedDataRecords.put(shard.getShardId(), new ArrayList<Record>());
        }

        while ((str = in.readLine()) != null) {
            String[] strArr = str.split(",");
            if (strArr.length != NUM_FIELDS_IN_FILE) {
                throw new InvalidArgumentException("Unexpected input in file. "
                        + "Expected format (shardId, sequenceNumber, partitionKey, dataRecord, timestamp)");
            }
            String shardId = strArr[LocalFileFields.SHARD_ID.getPosition()];
            Record record = new Record();
            record.setSequenceNumber(strArr[LocalFileFields.SEQUENCE_NUMBER.getPosition()]);
            record.setPartitionKey(strArr[LocalFileFields.PARTITION_KEY.getPosition()]);
            ByteBuffer byteBuffer = encoder.encode(CharBuffer.wrap(strArr[LocalFileFields.DATA.getPosition()]));
            record.setData(byteBuffer);
            Date timestamp = new Date(
                    Long.parseLong(strArr[LocalFileFields.APPROXIMATE_ARRIVAL_TIMESTAMP.getPosition()]));
            record.setApproximateArrivalTimestamp(timestamp);
            List<Record> shardRecords = shardedDataRecords.get(shardId);
            if (shardRecords == null) {
                shardRecords = new ArrayList<Record>();
            }
            shardRecords.add(record);
            shardedDataRecords.put(shardId, shardRecords);
        }
    }
}

From source file: org.drools.planner.examples.cloudbalancing.persistence.CloudBalancingGenerator.java

public CloudBalance createCloudBalance(String inputId, int cloudComputerListSize, int cloudProcessListSize) {
    random = new Random(47);
    CloudBalance cloudBalance = new CloudBalance();
    cloudBalance.setId(0L);
    createCloudComputerList(cloudBalance, cloudComputerListSize);
    createCloudProcessList(cloudBalance, cloudProcessListSize);
    BigInteger possibleSolutionSize = BigInteger.valueOf(cloudBalance.getComputerList().size())
            .pow(cloudBalance.getProcessList().size());
    String flooredPossibleSolutionSize = "10^" + (possibleSolutionSize.toString().length() - 1);
    logger.info("CloudBalance {} has {} computers and {} processes with a search space of {}.", inputId,
            cloudComputerListSize, cloudProcessListSize,
            possibleSolutionSize.compareTo(BigInteger.valueOf(1000L)) < 0 ? possibleSolutionSize
                    : flooredPossibleSolutionSize);
    return cloudBalance;
}

From source file: org.apache.hadoop.hive.ql.optimizer.calcite.translator.RexNodeConverter.java

protected RexNode convert(ExprNodeConstantDesc literal) throws CalciteSemanticException {
    RexBuilder rexBuilder = cluster.getRexBuilder();
    RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
    PrimitiveTypeInfo hiveType = (PrimitiveTypeInfo) literal.getTypeInfo();
    RelDataType calciteDataType = TypeConverter.convert(hiveType, dtFactory);

    PrimitiveCategory hiveTypeCategory = hiveType.getPrimitiveCategory();

    ConstantObjectInspector coi = literal.getWritableObjectInspector();
    Object value = ObjectInspectorUtils.copyToStandardJavaObject(coi.getWritableConstantValue(), coi);

    RexNode calciteLiteral = null;
    // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
    switch (hiveTypeCategory) {
    case BOOLEAN:
        calciteLiteral = rexBuilder.makeLiteral(((Boolean) value).booleanValue());
        break;
    case BYTE:
        calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Byte) value), calciteDataType);
        break;
    case SHORT:
        calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Short) value), calciteDataType);
        break;
    case INT:
        calciteLiteral = rexBuilder.makeExactLiteral(new BigDecimal((Integer) value));
        break;
    case LONG:
        calciteLiteral = rexBuilder.makeBigintLiteral(new BigDecimal((Long) value));
        break;
    // TODO: is Decimal an exact numeric or approximate numeric?
    case DECIMAL:
        if (value instanceof HiveDecimal) {
            value = ((HiveDecimal) value).bigDecimalValue();
        } else if (value instanceof Decimal128) {
            value = ((Decimal128) value).toBigDecimal();
        }
        if (value == null) {
            // We have found an invalid decimal value while enforcing precision and scale.
            // Ideally, we would replace it with null here, which is what Hive does. However,
            // we need to plumb this through somehow, because otherwise having a different
            // expression type in the AST causes plan generation to fail after CBO, probably
            // due to some residual state in SA/QB. For now, we will not run CBO in the
            // presence of invalid decimal literals.
            throw new CalciteSemanticException(
                    "Expression " + literal.getExprString() + " is not a valid decimal",
                    UnsupportedFeature.Invalid_decimal);
            // TODO: return createNullLiteral(literal);
        }
        BigDecimal bd = (BigDecimal) value;
        BigInteger unscaled = bd.unscaledValue();
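        // The unscaled value fits in a signed 64-bit long, so the plain exact-literal overload can be used.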
        if (unscaled.compareTo(MIN_LONG_BI) >= 0 && unscaled.compareTo(MAX_LONG_BI) <= 0) {
            calciteLiteral = rexBuilder.makeExactLiteral(bd);
        } else {
            // CBO doesn't support unlimited precision decimals. In practice, this
            // will work...
            // An alternative would be to throw CboSemanticException and fall back
            // to no CBO.
            RelDataType relType = cluster.getTypeFactory().createSqlType(SqlTypeName.DECIMAL, bd.scale(),
                    unscaled.toString().length());
            calciteLiteral = rexBuilder.makeExactLiteral(bd, relType);
        }
        break;
    case FLOAT:
        calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal((Float) value), calciteDataType);
        break;
    case DOUBLE:
        calciteLiteral = rexBuilder.makeApproxLiteral(new BigDecimal((Double) value), calciteDataType);
        break;
    case CHAR:
        if (value instanceof HiveChar) {
            value = ((HiveChar) value).getValue();
        }
        calciteLiteral = rexBuilder.makeLiteral((String) value);
        break;
    case VARCHAR:
        if (value instanceof HiveVarchar) {
            value = ((HiveVarchar) value).getValue();
        }
        calciteLiteral = rexBuilder.makeLiteral((String) value);
        break;
    case STRING:
        calciteLiteral = rexBuilder.makeLiteral((String) value);
        break;
    case DATE:
        Calendar cal = new GregorianCalendar();
        cal.setTime((Date) value);
        calciteLiteral = rexBuilder.makeDateLiteral(cal);
        break;
    case TIMESTAMP:
        Calendar c = null;
        if (value instanceof Calendar) {
            c = (Calendar) value;
        } else {
            c = Calendar.getInstance();
            c.setTimeInMillis(((Timestamp) value).getTime());
        }
        calciteLiteral = rexBuilder.makeTimestampLiteral(c, RelDataType.PRECISION_NOT_SPECIFIED);
        break;
    case INTERVAL_YEAR_MONTH:
        // Calcite year-month literal value is months as BigDecimal
        BigDecimal totalMonths = BigDecimal.valueOf(((HiveIntervalYearMonth) value).getTotalMonths());
        calciteLiteral = rexBuilder.makeIntervalLiteral(totalMonths,
                new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, new SqlParserPos(1, 1)));
        break;
    case INTERVAL_DAY_TIME:
        // Calcite day-time interval is millis value as BigDecimal
        // Seconds converted to millis
        BigDecimal secsValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getTotalSeconds() * 1000);
        // Nanos converted to millis
        BigDecimal nanosValueBd = BigDecimal.valueOf(((HiveIntervalDayTime) value).getNanos(), 6);
        calciteLiteral = rexBuilder.makeIntervalLiteral(secsValueBd.add(nanosValueBd),
                new SqlIntervalQualifier(TimeUnit.DAY, TimeUnit.SECOND, new SqlParserPos(1, 1)));
        break;
    case VOID:
        calciteLiteral = cluster.getRexBuilder().makeLiteral(null,
                cluster.getTypeFactory().createSqlType(SqlTypeName.NULL), true);
        break;
    case BINARY:
    case UNKNOWN:
    default:
        throw new RuntimeException("UnSupported Literal");
    }

    return calciteLiteral;
}

From source file: com.cloud.utils.net.NetUtils.java

public static String getIp6FromRange(final String ip6Range) {
    final String[] ips = ip6Range.split("-");
    final String startIp = ips[0];
    final IPv6Address start = IPv6Address.fromString(startIp);
    final BigInteger gap = countIp6InRange(ip6Range);
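    // Draw a random offset of gap.bitLength() bits and retry until it is less than gap (rejection sampling).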
    BigInteger next = new BigInteger(gap.bitLength(), s_rand);
    while (next.compareTo(gap) >= 0) {
        next = new BigInteger(gap.bitLength(), s_rand);
    }
    InetAddress resultAddr = null;
    final BigInteger startInt = convertIPv6AddressToBigInteger(start);
    if (startInt != null) {
        final BigInteger resultInt = startInt.add(next);
        try {
            resultAddr = InetAddress.getByAddress(resultInt.toByteArray());
        } catch (final UnknownHostException e) {
            return null;
        }
    }
    if (resultAddr != null) {
        final IPv6Address ip = IPv6Address.fromInetAddress(resultAddr);
        return ip.toString();
    }
    return null;
}

From source file: info.savestate.saveybot.JSONFileManipulator.java

public String lowestSlot() {
    BigInteger lowest = BigInteger.ZERO;
    JSONArray json = getJSON();
    boolean passed = false;
    while (!passed) {
        passed = true;
        for (int i = 0; i < json.length(); i++) {
            JSONObject o = json.getJSONObject(i);
            BigInteger current = o.getBigInteger("slot");
            if (current.compareTo(lowest) == 0) {
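                // The candidate slot is already taken: advance to the next value and rescan from the start.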
                lowest = lowest.add(BigInteger.ONE);
                passed = false;
                break;
            }
        }
    }
    return lowest.toString();
}

From source file: com.amazonaws.kinesis.agg.AggRecord.java

/**
 * Validate the explicit hash key of an input Kinesis user record.
 *
 * @param explicitHashKey
 *            The string containing the input explicit hash key to validate.
 */
private void validateExplicitHashKey(final String explicitHashKey) {
    if (explicitHashKey == null) {
        return;
    }

    BigInteger b = null;
    try {
        b = new BigInteger(explicitHashKey);
        if (b.compareTo(UINT_128_MAX) > 0 || b.compareTo(BigInteger.ZERO) < 0) {
            throw new IllegalArgumentException(
                    "Invalid explicitHashKey, must be greater or equal to zero and less than or equal to (2^128 - 1), got "
                            + explicitHashKey);
        }
    } catch (NumberFormatException e) {
        throw new IllegalArgumentException(
                "Invalid explicitHashKey, must be an integer, got " + explicitHashKey);
    }
}

From source file: org.mifos.platform.questionnaire.validators.QuestionnaireValidatorImpl.java

private boolean invalidNumericAnswer(String answer, Integer allowedMin, Integer allowedMax) {
    boolean result;
    try {
        BigInteger answerAsInt = new BigInteger(answer, 10);
        result = (allowedMin != null && answerAsInt.compareTo(new BigInteger(allowedMin.toString())) < 0)
                || (allowedMax != null && answerAsInt.compareTo(new BigInteger(allowedMax.toString())) > 0);
    } catch (NumberFormatException e) {
        result = true;
    }
    return result;
}

From source file: burstcoin.observer.service.AssetService.java

private String convertPrice(String priceString, int decimals) {
    BigInteger price = new BigInteger(priceString);
    BigInteger amount = price.multiply(new BigInteger("" + (long) Math.pow(10, decimals)));
    String negative = "";
    String afterComma = "";
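    // Split the amount into its whole part and an 8-digit fractional part (divisor 10^8).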
    String fractionalPart = amount.mod(new BigInteger("100000000")).toString();
    amount = amount.divide(new BigInteger("100000000"));
    if (amount.compareTo(BigInteger.ZERO) < 0) {
        amount = amount.abs();
        negative = "-";
    }
    if (!fractionalPart.equals("0")) {
        afterComma = ".";
        for (int i = fractionalPart.length(); i < 8; i++) {
            afterComma += "0";
        }
        afterComma += fractionalPart.replace("0+$", "");
    }
    String result = negative + amount + afterComma;
    while (result.lastIndexOf("0") == result.length() - 1 && result.contains(".")) {
        result = result.substring(0, result.length() - 1);
    }
    if (result.lastIndexOf(".") == result.length() - 1) {
        result = result.substring(0, result.length() - 1);
    }
    return result;
}

From source file: be.fedict.trust.service.bean.HarvesterMDB.java

private void processHarvestMessage(HarvestMessage harvestMessage) {
    if (null == harvestMessage) {
        return;
    }
    String caName = harvestMessage.getCaName();
    boolean update = harvestMessage.isUpdate();
    String crlFilePath = harvestMessage.getCrlFile();
    File crlFile = new File(crlFilePath);

    LOG.debug("processHarvestMessage - Don't have CA's Serial Number??");
    LOG.debug("issuer: " + caName);
    CertificateAuthorityEntity certificateAuthority = this.certificateAuthorityDAO
            .findCertificateAuthority(caName);
    if (null == certificateAuthority) {
        LOG.error("unknown certificate authority: " + caName);
        deleteCrlFile(crlFile);
        return;
    }
    if (!update && Status.PROCESSING != certificateAuthority.getStatus()) {
        /*
         * Possible that another harvester instance already activated or is
         * processing the CA cache in the meanwhile.
         */
        LOG.debug("CA status not marked for processing");
        deleteCrlFile(crlFile);
        return;
    }

    Date validationDate = new Date();

    X509Certificate issuerCertificate = certificateAuthority.getCertificate();

    Date notAfter = issuerCertificate.getNotAfter();
    if (validationDate.after(notAfter)) {
        LOG.info("will not update CRL cache for expired CA: " + issuerCertificate.getSubjectX500Principal());
        deleteCrlFile(crlFile);
        return;
    }

    FileInputStream crlInputStream;
    try {
        crlInputStream = new FileInputStream(crlFile);
    } catch (FileNotFoundException e) {
        LOG.error("CRL file does not exist: " + crlFilePath);
        return;
    }
    X509CRL crl;
    try {
        CertificateFactory certificateFactory = CertificateFactory.getInstance("X.509", "BC");
        crl = (X509CRL) certificateFactory.generateCRL(crlInputStream);
    } catch (Exception e) {
        LOG.error("BC error: " + e.getMessage(), e);
        deleteCrlFile(crlFile);
        return;
    }

    LOG.debug("checking integrity CRL...");
    boolean crlValid = CrlTrustLinker.checkCrlIntegrity(crl, issuerCertificate, validationDate);
    if (!crlValid) {
        this.auditDAO.logAudit("Invalid CRL for CA=" + caName);
        deleteCrlFile(crlFile);
        return;
    }
    BigInteger crlNumber = getCrlNumber(crl);
    LOG.debug("CRL number: " + crlNumber);

    BigInteger currentCrlNumber = this.certificateAuthorityDAO.findCrlNumber(caName);
    if (null != currentCrlNumber) {
        LOG.debug("CRL number in database: " + currentCrlNumber);
    }
    if (null != currentCrlNumber && currentCrlNumber.compareTo(crlNumber) >= 0
            && certificateAuthority.getStatus() == Status.ACTIVE) {
        // current CRL cache is higher or equal, no update needed
        LOG.debug("current CA cache is new enough.");
        deleteCrlFile(crlFile);
        return;
    }

    List<RevokedCertificateEntity> revokedCertificateEntities = this.certificateAuthorityDAO
            .getRevokedCertificates(caName);
    LOG.debug("number of revoked certificates in database: " + revokedCertificateEntities.size());
    Map<String, RevokedCertificateEntity> revokedCertificatesMap = new HashMap<String, RevokedCertificateEntity>();
    for (RevokedCertificateEntity revokedCertificateEntity : revokedCertificateEntities) {
        String serialNumber = revokedCertificateEntity.getPk().getSerialNumber();
        revokedCertificatesMap.put(serialNumber, revokedCertificateEntity);
    }

    LOG.debug("processing CRL... " + caName);
    boolean isIndirect;
    Enumeration revokedCertificatesEnum;
    try {
        isIndirect = isIndirectCRL(crl);
        revokedCertificatesEnum = getRevokedCertificatesEnum(crl);
    } catch (Exception e) {
        this.auditDAO.logAudit("Failed to parse CRL for CA=" + caName);
        this.failures++;
        throw new RuntimeException(e);
    }

    int entries = 0;
    if (revokedCertificatesEnum.hasMoreElements()) {
        /*
         * Split up persisting the crl entries to avoid memory issues.
         */
        Set<X509CRLEntry> revokedCertsBatch = new HashSet<X509CRLEntry>();
        X500Principal previousCertificateIssuer = crl.getIssuerX500Principal();
        int added = 0;
        while (revokedCertificatesEnum.hasMoreElements()) {

            TBSCertList.CRLEntry entry = (TBSCertList.CRLEntry) revokedCertificatesEnum.nextElement();
            X500Name x500name = new X500Name(previousCertificateIssuer.getName(X500Principal.RFC1779));
            X509CRLEntryObject revokedCertificate = new X509CRLEntryObject(entry, isIndirect, x500name);
            previousCertificateIssuer = revokedCertificate.getCertificateIssuer();

            revokedCertsBatch.add(revokedCertificate);
            added++;
            if (added == BATCH_SIZE) {
                /*
                 * Persist batch
                 */
                this.certificateAuthorityDAO.updateRevokedCertificates(revokedCertsBatch, crlNumber,
                        crl.getIssuerX500Principal(), revokedCertificatesMap);
                entries += revokedCertsBatch.size();
                revokedCertsBatch.clear();
                added = 0;
            }
        }
        /*
         * Persist final batch
         */
        this.certificateAuthorityDAO.updateRevokedCertificates(revokedCertsBatch, crlNumber,
                crl.getIssuerX500Principal(), revokedCertificatesMap);
        entries += revokedCertsBatch.size();

        /*
         * Cleanup redundant CRL entries
         */
        if (null != crlNumber) {
            this.certificateAuthorityDAO.removeOldRevokedCertificates(crlNumber,
                    crl.getIssuerX500Principal().toString());
        }
    }

    deleteCrlFile(crlFile);

    LOG.debug("CRL this update: " + crl.getThisUpdate());
    LOG.debug("CRL next update: " + crl.getNextUpdate());
    certificateAuthority.setStatus(Status.ACTIVE);
    certificateAuthority.setThisUpdate(crl.getThisUpdate());
    certificateAuthority.setNextUpdate(crl.getNextUpdate());
    LOG.debug("cache activated for CA: " + crl.getIssuerX500Principal() + " (entries=" + entries + ")");
}