Example usage for com.google.common.collect Sets newHashSetWithExpectedSize

List of usage examples for com.google.common.collect Sets newHashSetWithExpectedSize

Introduction

On this page you can find example usage for com.google.common.collect Sets newHashSetWithExpectedSize.

Prototype

public static <E> HashSet<E> newHashSetWithExpectedSize(int expectedSize) 

Document

Creates a HashSet instance, with a high enough initial table size that it should hold expectedSize elements without resizing.
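
For a quick feel of the method outside any framework code, here is a minimal, self-contained sketch; the class name and the sample values are illustrative only and are not taken from the examples below.

import com.google.common.collect.Sets;

import java.util.Arrays;
import java.util.List;
import java.util.Set;

public class NewHashSetWithExpectedSizeExample {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alpha", "beta", "alpha");
        // Pre-size the set so it can hold names.size() elements without an internal resize.
        Set<String> unique = Sets.newHashSetWithExpectedSize(names.size());
        unique.addAll(names);
        System.out.println(unique); // the two distinct names, e.g. [alpha, beta]
    }
}

Compared with calling new HashSet<>() and letting the table grow, passing the expected size up front avoids intermediate rehashing when the eventual number of elements is known.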

Usage

From source file:org.n52.svalbard.encode.SensorMLEncoderv101.java

private void addAbstractProcessValues(final AbstractProcessType abstractProcess,
        final AbstractProcess sosAbstractProcess) throws EncodingException {
    if (sosAbstractProcess.isSetGmlID()) {
        abstractProcess.setId(sosAbstractProcess.getGmlId());
    }

    if (sosAbstractProcess.isSetCapabilities()) {
        final Capabilities[] existing = abstractProcess.getCapabilitiesArray();
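        // collect the names of the existing capabilities; the set is pre-sized so it never needs to grow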
        final Set<String> names = Sets.newHashSetWithExpectedSize(existing.length);
        for (final Capabilities element : existing) {
            if (element.getName() != null) {
                names.add(element.getName());
            }
        }
        for (final SmlCapabilities sosCapability : sosAbstractProcess.getCapabilities()) {
            final Capabilities c = createCapability(sosCapability);
            // replace existing capability with the same name
            if (names.contains(c.getName())) {
                removeCapability(abstractProcess, c);
            }
            abstractProcess.addNewCapabilities().set(c);
        }
    }

    // set description
    if (sosAbstractProcess.isSetDescription() && !abstractProcess.isSetDescription()) {
        abstractProcess.addNewDescription().setStringValue(sosAbstractProcess.getDescription());
    }
    if (sosAbstractProcess.isSetName() && CollectionHelper.isNullOrEmpty(abstractProcess.getNameArray())) {
        // TODO check if override existing names
        addNamesToAbstractProcess(abstractProcess, sosAbstractProcess.getNames());
    }
    // set identification
    if (sosAbstractProcess.isSetIdentifications()) {
        abstractProcess.setIdentificationArray(createIdentification(sosAbstractProcess.getIdentifications()));
    }
    // set classification
    if (sosAbstractProcess.isSetClassifications()) {
        abstractProcess.setClassificationArray(createClassification(sosAbstractProcess.getClassifications()));
    }
    // set characteristics
    if (sosAbstractProcess.isSetCharacteristics()) {
        abstractProcess.setCharacteristicsArray(createCharacteristics(sosAbstractProcess.getCharacteristics()));
    }
    // set documentation
    if (sosAbstractProcess.isSetDocumentation()
            && CollectionHelper.isNullOrEmpty(abstractProcess.getDocumentationArray())) {
        abstractProcess.setDocumentationArray(createDocumentationArray(sosAbstractProcess.getDocumentation()));
    }
    // set contacts if contacts aren't already present in the abstract
    // process
    if (sosAbstractProcess.isSetContact()
            && CollectionHelper.isNullOrEmpty(abstractProcess.getContactArray())) {
        ContactList contactList = createContactList(sosAbstractProcess.getContact());
        if (contactList != null && contactList.getMemberArray().length > 0) {
            abstractProcess.addNewContact().setContactList(contactList);
        }
    }
    // set keywords
    if (sosAbstractProcess.isSetKeywords()) {
        final List<String> keywords = sosAbstractProcess.getKeywords();
        final int length = abstractProcess.getKeywordsArray().length;
        for (int i = 0; i < length; ++i) {
            abstractProcess.removeKeywords(i);
        }
        abstractProcess.addNewKeywords().addNewKeywordList()
                .setKeywordArray(keywords.toArray(new String[keywords.size()]));
    }

    if (sosAbstractProcess.isSetValidTime()) {
        if (abstractProcess.isSetValidTime()) {
            // remove existing validTime element
            final XmlCursor newCursor = abstractProcess.getValidTime().newCursor();
            newCursor.removeXml();
            newCursor.dispose();
        }
        final Time time = sosAbstractProcess.getMergedValidTime();
        final XmlObject xbtime = encodeObjectToXml(GmlConstants.NS_GML, time);
        if (time instanceof TimeInstant) {
            abstractProcess.addNewValidTime().addNewTimeInstant().set(xbtime);
        } else if (time instanceof TimePeriod) {
            abstractProcess.addNewValidTime().addNewTimePeriod().set(xbtime);
        }
    }
}

From source file:org.n52.svalbard.encode.SensorMLEncoderv20.java

private void addDescribedObjectValues(DescribedObjectType dot, DescribedObject describedObject)
        throws EncodingException {
    if (!describedObject.isSetGmlID()) {
        describedObject.setGmlId("do_" + JavaHelper.generateID(describedObject.toString()));
    }
    if (dot.getId() == null || dot.getId().isEmpty()) {
        dot.setId(describedObject.getGmlId());
    }

    // update/set gml:identifier
    if (describedObject.isSetIdentifier()) {
        describedObject.getIdentifierCodeWithAuthority().setCodeSpace(OGCConstants.UNIQUE_ID);
        XmlObject encodeObjectToXml = encodeObjectToXmlGml32(describedObject.getIdentifierCodeWithAuthority());
        if (encodeObjectToXml != null) {
            if (dot.isSetIdentifier()) {
                dot.getIdentifier().set(encodeObjectToXml);
            } else {
                dot.addNewIdentifier().set(encodeObjectToXml);
            }
        }
    }

    // set capabilities
    if (describedObject.isSetCapabilities()) {
        final Capabilities[] existing = dot.getCapabilitiesArray();
        final Set<String> names = Sets.newHashSetWithExpectedSize(existing.length);
        for (final Capabilities element : existing) {
            if (element.getName() != null) {
                names.add(element.getName());
            }
        }
        for (final SmlCapabilities sosCapability : describedObject.getCapabilities()) {
            // check for observedBBOX, currently not supported, how to model
            // in SML 2.0?
            // Update Discovery Profile
            if (!SensorMLConstants.ELEMENT_NAME_OBSERVED_BBOX.equals(sosCapability.getName())) {
                final Capabilities c = createCapability(sosCapability);
                // replace existing capability with the same name
                if (c != null) {
                    if (names.contains(c.getName())) {
                        removeCapability(dot, c);
                    }
                    dot.addNewCapabilities().set(c);
                }
            }
        }
    }
    // set description
    if (describedObject.isSetDescription() && !dot.isSetDescription()) {
        dot.addNewDescription().setStringValue(describedObject.getDescription());
    }
    // set names
    if (describedObject.isSetName() && CollectionHelper.isNullOrEmpty(dot.getNameArray())) {
        // TODO check if override existing names
        addNamesToAbstractProcess(dot, describedObject.getNames());
    }
    // set location
    // set extension
    // set keywords
    if (describedObject.isSetKeywords()) {
        if (CollectionHelper.isNullOrEmpty(dot.getKeywordsArray())) {
            final List<String> keywords = describedObject.getKeywords();
            // final int length = dot.getKeywordsArray().length;
            // for (int i = 0; i < length; ++i) {
            // dot.removeKeywords(i);
            // }
            dot.addNewKeywords().addNewKeywordList()
                    .setKeywordArray(keywords.toArray(new String[keywords.size()]));
            // TODO else
        }
    }
    // set identification
    if (describedObject.isSetIdentifications()) {
        // TODO check for merging identifications if exists
        dot.setIdentificationArray(createIdentification(describedObject.getIdentifications()));
    }
    // set classification
    if (describedObject.isSetClassifications()) {
        dot.setClassificationArray(createClassification(describedObject.getClassifications()));
    }
    // set validTime
    if (describedObject.isSetValidTime() && CollectionHelper.isNullOrEmpty(dot.getValidTimeArray())) {
        for (Time time : describedObject.getValidTime()) {
            final XmlObject xbtime = encodeObjectToXmlGml32(time);
            if (time instanceof TimeInstant) {
                dot.addNewValidTime().addNewTimeInstant().set(xbtime);
            } else if (time instanceof TimePeriod) {
                dot.addNewValidTime().addNewTimePeriod().set(xbtime);
            }
        }

        // } else {
        // TODO remove or
        // remove existing validTime element
        // final XmlCursor newCursor = dot.getValidTime().newCursor();
        // newCursor.removeXml();
        // newCursor.dispose();
    }
    // set securityConstraints
    // set legalConstraints
    // set characteristics
    if (describedObject.isSetCharacteristics()) {
        if (CollectionHelper.isNullOrEmpty(dot.getCharacteristicsArray())) {
            dot.setCharacteristicsArray(createCharacteristics(describedObject.getCharacteristics()));
        }
    }
    // set contacts if contacts aren't already present in the abstract
    // process
    // if (describedObject.isSetContact() &&
    // CollectionHelper.isNotNullOrEmpty(dot.getContactsArray())) {
    if (describedObject.isSetContact()) {
        if (CollectionHelper.isNullOrEmpty(dot.getContactsArray())) {
            ContactListType cl = ContactListType.Factory.newInstance();
            for (SmlContact contact : describedObject.getContact()) {
                if (contact instanceof SmlResponsibleParty) {
                    if (contact.isSetHref()) {
                        XmlObject xml = encodeObjectToXml(GmdConstants.NS_GMD, (SmlResponsibleParty) contact,
                                EncodingContext.of(XmlBeansEncodingFlags.PROPERTY_TYPE));
                        cl.addNewContact().set(xml);
                    } else {
                        XmlObject encodeObjectToXml = encodeObjectToXml(GmdConstants.NS_GMD, contact);
                        if (encodeObjectToXml != null) {
                            cl.addNewContact().addNewCIResponsibleParty().set(encodeObjectToXml);
                        }
                    }
                }
            }
            if (CollectionHelper.isNotNullOrEmpty(cl.getContactArray())) {
                dot.addNewContacts().setContactList(cl);
            }
        }
    }
    // set documentation
    if (describedObject.isSetDocumentation()) {
        if (CollectionHelper.isNullOrEmpty(dot.getDocumentationArray())) {
            dot.setDocumentationArray(createDocumentationArray(describedObject.getDocumentation()));
        }
    }
    // set history

}

From source file:org.apache.phoenix.coprocessor.UngroupedAggregateRegionObserver.java

@Override
protected RegionScanner doPostScannerOpen(final ObserverContext<RegionCoprocessorEnvironment> c,
        final Scan scan, final RegionScanner s) throws IOException {
    Region region = c.getEnvironment().getRegion();
    long ts = scan.getTimeRange().getMax();
    StatisticsCollector stats = null;
    if (ScanUtil.isAnalyzeTable(scan)) {
        byte[] gp_width_bytes = scan.getAttribute(BaseScannerRegionObserver.GUIDEPOST_WIDTH_BYTES);
        byte[] gp_per_region_bytes = scan.getAttribute(BaseScannerRegionObserver.GUIDEPOST_PER_REGION);
        // Let this throw, as this scan is being done for the sole purpose of collecting stats
        stats = new StatisticsCollector(c.getEnvironment(), region.getRegionInfo().getTable().getNameAsString(),
                ts, gp_width_bytes, gp_per_region_bytes);
    }
    int offsetToBe = 0;
    if (ScanUtil.isLocalIndex(scan)) {
        /*
         * For local indexes, we need to set an offset on row key expressions to skip
         * the region start key.
         */
        offsetToBe = region.getRegionInfo().getStartKey().length != 0
                ? region.getRegionInfo().getStartKey().length
                : region.getRegionInfo().getEndKey().length;
        ScanUtil.setRowKeyOffset(scan, offsetToBe);
    }
    final int offset = offsetToBe;

    PTable projectedTable = null;
    PTable writeToTable = null;
    byte[][] values = null;
    byte[] descRowKeyTableBytes = scan.getAttribute(UPGRADE_DESC_ROW_KEY);
    boolean isDescRowKeyOrderUpgrade = descRowKeyTableBytes != null;
    if (isDescRowKeyOrderUpgrade) {
        logger.warn("Upgrading row key for " + region.getRegionInfo().getTable().getNameAsString());
        projectedTable = deserializeTable(descRowKeyTableBytes);
        try {
            writeToTable = PTableImpl.makePTable(projectedTable, true);
        } catch (SQLException e) {
            ServerUtil.throwIOException("Upgrade failed", e); // Impossible
        }
        values = new byte[projectedTable.getPKColumns().size()][];
    }
    byte[] localIndexBytes = scan.getAttribute(LOCAL_INDEX_BUILD);
    List<IndexMaintainer> indexMaintainers = localIndexBytes == null ? null
            : IndexMaintainer.deserialize(localIndexBytes);
    List<Mutation> indexMutations = localIndexBytes == null ? Collections.<Mutation>emptyList()
            : Lists.<Mutation>newArrayListWithExpectedSize(1024);

    RegionScanner theScanner = s;

    byte[] indexUUID = scan.getAttribute(PhoenixIndexCodec.INDEX_UUID);
    List<Expression> selectExpressions = null;
    byte[] upsertSelectTable = scan.getAttribute(BaseScannerRegionObserver.UPSERT_SELECT_TABLE);
    boolean isUpsert = false;
    boolean isDelete = false;
    byte[] deleteCQ = null;
    byte[] deleteCF = null;
    byte[] emptyCF = null;
    ImmutableBytesWritable ptr = new ImmutableBytesWritable();
    if (upsertSelectTable != null) {
        isUpsert = true;
        projectedTable = deserializeTable(upsertSelectTable);
        selectExpressions = deserializeExpressions(
                scan.getAttribute(BaseScannerRegionObserver.UPSERT_SELECT_EXPRS));
        values = new byte[projectedTable.getPKColumns().size()][];
    } else {
        byte[] isDeleteAgg = scan.getAttribute(BaseScannerRegionObserver.DELETE_AGG);
        isDelete = isDeleteAgg != null && Bytes.compareTo(PDataType.TRUE_BYTES, isDeleteAgg) == 0;
        if (!isDelete) {
            deleteCF = scan.getAttribute(BaseScannerRegionObserver.DELETE_CF);
            deleteCQ = scan.getAttribute(BaseScannerRegionObserver.DELETE_CQ);
        }
        emptyCF = scan.getAttribute(BaseScannerRegionObserver.EMPTY_CF);
    }
    TupleProjector tupleProjector = null;
    Region dataRegion = null;
    byte[][] viewConstants = null;
    ColumnReference[] dataColumns = IndexUtil.deserializeDataTableColumnsToJoin(scan);
    boolean localIndexScan = ScanUtil.isLocalIndex(scan);
    final TupleProjector p = TupleProjector.deserializeProjectorFromScan(scan);
    final HashJoinInfo j = HashJoinInfo.deserializeHashJoinFromScan(scan);
    if ((localIndexScan && !isDelete && !isDescRowKeyOrderUpgrade) || (j == null && p != null)) {
        if (dataColumns != null) {
            tupleProjector = IndexUtil.getTupleProjector(scan, dataColumns);
            dataRegion = IndexUtil.getDataRegion(c.getEnvironment());
            viewConstants = IndexUtil.deserializeViewConstantsFromScan(scan);
        }
        ImmutableBytesWritable tempPtr = new ImmutableBytesWritable();
        theScanner = getWrappedScanner(c, theScanner, offset, scan, dataColumns, tupleProjector, dataRegion,
                indexMaintainers == null ? null : indexMaintainers.get(0), viewConstants, p, tempPtr);
    }

    if (j != null) {
        theScanner = new HashJoinRegionScanner(theScanner, p, j, ScanUtil.getTenantId(scan),
                c.getEnvironment());
    }

    int batchSize = 0;
    List<Mutation> mutations = Collections.emptyList();
    boolean buildLocalIndex = indexMaintainers != null && dataColumns == null && !localIndexScan;
    if (isDescRowKeyOrderUpgrade || isDelete || isUpsert || (deleteCQ != null && deleteCF != null)
            || emptyCF != null || buildLocalIndex) {
        // TODO: size better
        mutations = Lists.newArrayListWithExpectedSize(1024);
        batchSize = c.getEnvironment().getConfiguration().getInt(MUTATE_BATCH_SIZE_ATTRIB,
                QueryServicesOptions.DEFAULT_MUTATE_BATCH_SIZE);
    }
    Aggregators aggregators = ServerAggregators.deserialize(
            scan.getAttribute(BaseScannerRegionObserver.AGGREGATORS), c.getEnvironment().getConfiguration());
    Aggregator[] rowAggregators = aggregators.getAggregators();
    boolean hasMore;
    boolean hasAny = false;
    MultiKeyValueTuple result = new MultiKeyValueTuple();
    if (logger.isDebugEnabled()) {
        logger.debug(LogUtil.addCustomAnnotations(
                "Starting ungrouped coprocessor scan " + scan + " " + region.getRegionInfo(),
                ScanUtil.getCustomAnnotations(scan)));
    }
    long rowCount = 0;
    final RegionScanner innerScanner = theScanner;
    region.startRegionOperation();
    try {
        synchronized (innerScanner) {
            do {
                List<Cell> results = new ArrayList<Cell>();
                // Results are potentially returned even when the return value of s.next is false
                // since this is an indication of whether or not there are more values after the
                // ones returned
                hasMore = innerScanner.nextRaw(results);
                if (stats != null) {
                    stats.collectStatistics(results);
                }
                if (!results.isEmpty()) {
                    rowCount++;
                    result.setKeyValues(results);
                    try {
                        if (isDescRowKeyOrderUpgrade) {
                            Arrays.fill(values, null);
                            Cell firstKV = results.get(0);
                            RowKeySchema schema = projectedTable.getRowKeySchema();
                            int maxOffset = schema.iterator(firstKV.getRowArray(),
                                    firstKV.getRowOffset() + offset, firstKV.getRowLength(), ptr);
                            for (int i = 0; i < schema.getFieldCount(); i++) {
                                Boolean hasValue = schema.next(ptr, i, maxOffset);
                                if (hasValue == null) {
                                    break;
                                }
                                Field field = schema.getField(i);
                                if (field.getSortOrder() == SortOrder.DESC) {
                                    // Special case for re-writing DESC ARRAY, as the actual byte value needs to change in this case
                                    if (field.getDataType().isArrayType()) {
                                        field.getDataType().coerceBytes(ptr, null, field.getDataType(),
                                                field.getMaxLength(), field.getScale(), field.getSortOrder(),
                                                field.getMaxLength(), field.getScale(), field.getSortOrder(),
                                                true); // force to use correct separator byte
                                    }
                                    // Special case for re-writing DESC CHAR or DESC BINARY, to force the re-writing of trailing space characters
                                    else if (field.getDataType() == PChar.INSTANCE
                                            || field.getDataType() == PBinary.INSTANCE) {
                                        int len = ptr.getLength();
                                        while (len > 0 && ptr.get()[ptr.getOffset() + len
                                                - 1] == StringUtil.SPACE_UTF8) {
                                            len--;
                                        }
                                        ptr.set(ptr.get(), ptr.getOffset(), len);
                                        // Special case for re-writing DESC FLOAT and DOUBLE, as they're not inverted like they should be (PHOENIX-2171)
                                    } else if (field.getDataType() == PFloat.INSTANCE
                                            || field.getDataType() == PDouble.INSTANCE) {
                                        byte[] invertedBytes = SortOrder.invert(ptr.get(), ptr.getOffset(),
                                                ptr.getLength());
                                        ptr.set(invertedBytes);
                                    }
                                } else if (field.getDataType() == PBinary.INSTANCE) {
                                    // Remove trailing space characters so that the setValues call below will replace them
                                    // with the correct zero byte character. Note this is somewhat dangerous as these
                                    // could be legit, but I don't know what the alternative is.
                                    int len = ptr.getLength();
                                    while (len > 0
                                            && ptr.get()[ptr.getOffset() + len - 1] == StringUtil.SPACE_UTF8) {
                                        len--;
                                    }
                                    ptr.set(ptr.get(), ptr.getOffset(), len);
                                }
                                values[i] = ptr.copyBytes();
                            }
                            writeToTable.newKey(ptr, values);
                            if (Bytes.compareTo(firstKV.getRowArray(), firstKV.getRowOffset() + offset,
                                    firstKV.getRowLength(), ptr.get(), ptr.getOffset() + offset,
                                    ptr.getLength()) == 0) {
                                continue;
                            }
                            byte[] newRow = ByteUtil.copyKeyBytesIfNecessary(ptr);
                            if (offset > 0) { // for local indexes (prepend region start key)
                                byte[] newRowWithOffset = new byte[offset + newRow.length];
                                System.arraycopy(firstKV.getRowArray(), firstKV.getRowOffset(),
                                        newRowWithOffset, 0, offset);
                                System.arraycopy(newRow, 0, newRowWithOffset, offset, newRow.length);
                                newRow = newRowWithOffset;
                            }
                            byte[] oldRow = Bytes.copy(firstKV.getRowArray(), firstKV.getRowOffset(),
                                    firstKV.getRowLength());
                            for (Cell cell : results) {
                                // Copy existing cell but with new row key
                                Cell newCell = new KeyValue(newRow, 0, newRow.length, cell.getFamilyArray(),
                                        cell.getFamilyOffset(), cell.getFamilyLength(),
                                        cell.getQualifierArray(), cell.getQualifierOffset(),
                                        cell.getQualifierLength(), cell.getTimestamp(),
                                        KeyValue.Type.codeToType(cell.getTypeByte()), cell.getValueArray(),
                                        cell.getValueOffset(), cell.getValueLength());
                                switch (KeyValue.Type.codeToType(cell.getTypeByte())) {
                                case Put:
                                    // If Put, point delete old Put
                                    Delete del = new Delete(oldRow);
                                    del.addDeleteMarker(new KeyValue(cell.getRowArray(), cell.getRowOffset(),
                                            cell.getRowLength(), cell.getFamilyArray(), cell.getFamilyOffset(),
                                            cell.getFamilyLength(), cell.getQualifierArray(),
                                            cell.getQualifierOffset(), cell.getQualifierLength(),
                                            cell.getTimestamp(), KeyValue.Type.Delete,
                                            ByteUtil.EMPTY_BYTE_ARRAY, 0, 0));
                                    mutations.add(del);

                                    Put put = new Put(newRow);
                                    put.add(newCell);
                                    mutations.add(put);
                                    break;
                                case Delete:
                                case DeleteColumn:
                                case DeleteFamily:
                                case DeleteFamilyVersion:
                                    Delete delete = new Delete(newRow);
                                    delete.addDeleteMarker(newCell);
                                    mutations.add(delete);
                                    break;
                                }
                            }
                        } else if (buildLocalIndex) {
                            for (IndexMaintainer maintainer : indexMaintainers) {
                                if (!results.isEmpty()) {
                                    result.getKey(ptr);
                                    ValueGetter valueGetter = maintainer.createGetterFromKeyValues(
                                            ImmutableBytesPtr.copyBytesIfNecessary(ptr), results);
                                    Put put = maintainer.buildUpdateMutation(kvBuilder, valueGetter, ptr, ts,
                                            c.getEnvironment().getRegion().getRegionInfo().getStartKey(),
                                            c.getEnvironment().getRegion().getRegionInfo().getEndKey());
                                    indexMutations.add(put);
                                }
                            }
                            result.setKeyValues(results);
                        } else if (isDelete) {
                            // FIXME: the version of the Delete constructor without the lock
                            // args was introduced in 0.94.4, thus if we try to use it here
                            // we can no longer use the 0.94.2 version of the client.
                            Cell firstKV = results.get(0);
                            Delete delete = new Delete(firstKV.getRowArray(), firstKV.getRowOffset(),
                                    firstKV.getRowLength(), ts);
                            mutations.add(delete);
                        } else if (isUpsert) {
                            Arrays.fill(values, null);
                            int i = 0;
                            List<PColumn> projectedColumns = projectedTable.getColumns();
                            for (; i < projectedTable.getPKColumns().size(); i++) {
                                Expression expression = selectExpressions.get(i);
                                if (expression.evaluate(result, ptr)) {
                                    values[i] = ptr.copyBytes();
                                    // If SortOrder from expression in SELECT doesn't match the
                                    // column being projected into then invert the bits.
                                    if (expression.getSortOrder() != projectedColumns.get(i).getSortOrder()) {
                                        SortOrder.invert(values[i], 0, values[i], 0, values[i].length);
                                    }
                                }
                            }
                            projectedTable.newKey(ptr, values);
                            PRow row = projectedTable.newRow(kvBuilder, ts, ptr);
                            for (; i < projectedColumns.size(); i++) {
                                Expression expression = selectExpressions.get(i);
                                if (expression.evaluate(result, ptr)) {
                                    PColumn column = projectedColumns.get(i);
                                    Object value = expression.getDataType().toObject(ptr,
                                            column.getSortOrder());
                                    // We are guaranteed that the two columns will have the
                                    // same type.
                                    if (!column.getDataType().isSizeCompatible(ptr, value, column.getDataType(),
                                            expression.getMaxLength(), expression.getScale(),
                                            column.getMaxLength(), column.getScale())) {
                                        throw new DataExceedsCapacityException(column.getDataType(),
                                                column.getMaxLength(), column.getScale());
                                    }
                                    column.getDataType().coerceBytes(ptr, value, expression.getDataType(),
                                            expression.getMaxLength(), expression.getScale(),
                                            expression.getSortOrder(), column.getMaxLength(), column.getScale(),
                                            column.getSortOrder(), projectedTable.rowKeyOrderOptimizable());
                                    byte[] bytes = ByteUtil.copyKeyBytesIfNecessary(ptr);
                                    row.setValue(column, bytes);
                                }
                            }
                            for (Mutation mutation : row.toRowMutations()) {
                                mutations.add(mutation);
                            }
                            for (i = 0; i < selectExpressions.size(); i++) {
                                selectExpressions.get(i).reset();
                            }
                        } else if (deleteCF != null && deleteCQ != null) {
                            // No need to search for delete column, since we project only it
                            // if no empty key value is being set
                            if (emptyCF == null || result.getValue(deleteCF, deleteCQ) != null) {
                                Delete delete = new Delete(results.get(0).getRowArray(),
                                        results.get(0).getRowOffset(), results.get(0).getRowLength());
                                delete.deleteColumns(deleteCF, deleteCQ, ts);
                                mutations.add(delete);
                            }
                        }
                        if (emptyCF != null) {
                            /*
                             * If we've specified an emptyCF, then we need to insert an empty
                             * key value "retroactively" for any key value that is visible at
                             * the timestamp that the DDL was issued. Key values that are not
                             * visible at this timestamp will not ever be projected up to
                             * scans past this timestamp, so don't need to be considered.
                             * We insert one empty key value per row per timestamp.
                             */
                            Set<Long> timeStamps = Sets.newHashSetWithExpectedSize(results.size());
                            for (Cell kv : results) {
                                long kvts = kv.getTimestamp();
                                if (!timeStamps.contains(kvts)) {
                                    Put put = new Put(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength());
                                    put.add(emptyCF, QueryConstants.EMPTY_COLUMN_BYTES, kvts,
                                            ByteUtil.EMPTY_BYTE_ARRAY);
                                    mutations.add(put);
                                    // remember this timestamp so only one empty KeyValue is added per row per timestamp
                                    timeStamps.add(kvts);
                                }
                            }
                        }
                        // Commit in batches based on UPSERT_BATCH_SIZE_ATTRIB in config
                        if (!mutations.isEmpty() && batchSize > 0 && mutations.size() % batchSize == 0) {
                            commitBatch(region, mutations, indexUUID);
                            mutations.clear();
                        }
                        // Commit in batches based on UPSERT_BATCH_SIZE_ATTRIB in config
                        if (!indexMutations.isEmpty() && batchSize > 0
                                && indexMutations.size() % batchSize == 0) {
                            commitIndexMutations(c, region, indexMutations);
                        }
                    } catch (ConstraintViolationException e) {
                        // Log and ignore in count
                        logger.error(LogUtil.addCustomAnnotations(
                                "Failed to create row in " + region.getRegionInfo().getRegionNameAsString()
                                        + " with values " + SchemaUtil.toString(values),
                                ScanUtil.getCustomAnnotations(scan)), e);
                        continue;
                    }
                    aggregators.aggregate(rowAggregators, result);
                    hasAny = true;
                }
            } while (hasMore);
        }
    } finally {
        try {
            if (stats != null) {
                try {
                    stats.updateStatistic(region);
                } finally {
                    stats.close();
                }
            }
        } finally {
            try {
                innerScanner.close();
            } finally {
                region.closeRegionOperation();
            }
        }
    }

    if (logger.isDebugEnabled()) {
        logger.debug(LogUtil.addCustomAnnotations(
                "Finished scanning " + rowCount + " rows for ungrouped coprocessor scan " + scan,
                ScanUtil.getCustomAnnotations(scan)));
    }

    if (!mutations.isEmpty()) {
        commitBatch(region, mutations, indexUUID);
    }

    if (!indexMutations.isEmpty()) {
        commitIndexMutations(c, region, indexMutations);
    }

    final boolean hadAny = hasAny;
    KeyValue keyValue = null;
    if (hadAny) {
        byte[] value = aggregators.toBytes(rowAggregators);
        keyValue = KeyValueUtil.newKeyValue(UNGROUPED_AGG_ROW_KEY, SINGLE_COLUMN_FAMILY, SINGLE_COLUMN,
                AGG_TIMESTAMP, value, 0, value.length);
    }
    final KeyValue aggKeyValue = keyValue;

    RegionScanner scanner = new BaseRegionScanner() {
        private boolean done = !hadAny;

        @Override
        public HRegionInfo getRegionInfo() {
            return innerScanner.getRegionInfo();
        }

        @Override
        public boolean isFilterDone() {
            return done;
        }

        @Override
        public void close() throws IOException {
            innerScanner.close();
        }

        @Override
        public boolean next(List<Cell> results) throws IOException {
            if (done)
                return false;
            done = true;
            results.add(aggKeyValue);
            return false;
        }

        @Override
        public long getMaxResultSize() {
            return scan.getMaxResultSize();
        }

        @Override
        public int getBatch() {
            return innerScanner.getBatch();
        }
    };
    return scanner;
}

From source file:org.n52.sos.encode.SensorMLEncoderv20.java

private void addDescribedObjectValues(DescribedObjectType dot, DescribedObject describedObject)
        throws OwsExceptionReport {
    if (!describedObject.isSetGmlId()) {
        describedObject.setGmlId("do_" + JavaHelper.generateID(describedObject.toString()));
    }
    dot.setId(describedObject.getGmlId());

    // update/set gml:identifier
    if (describedObject.isSetIdentifier()) {
        describedObject.getIdentifierCodeWithAuthority().setCodeSpace(OGCConstants.UNIQUE_ID);
        XmlObject encodeObjectToXml = CodingHelper.encodeObjectToXml(GmlConstants.NS_GML_32,
                describedObject.getIdentifierCodeWithAuthority());
        if (encodeObjectToXml != null) {
            if (dot.isSetIdentifier()) {
                dot.getIdentifier().set(encodeObjectToXml);
            } else {
                dot.addNewIdentifier().set(encodeObjectToXml);
            }
        }
    }

    // merge offerings if set
    if (describedObject.isSetOfferings()) {
        final Set<SweText> offeringsSet = convertOfferingsToSet(describedObject.getOfferings());
        mergeCapabilities(describedObject, SensorMLConstants.ELEMENT_NAME_OFFERINGS,
                SensorMLConstants.OFFERING_FIELD_DEFINITION, null, offeringsSet);
    }
    // set capabilities
    // TODO remove parentProcedure from capabilities
    if (describedObject.isSetCapabilities()) {
        final Capabilities[] existing = dot.getCapabilitiesArray();
        final Set<String> names = Sets.newHashSetWithExpectedSize(existing.length);
        for (final Capabilities element : existing) {
            if (element.getName() != null) {
                names.add(element.getName());
            }
        }
        for (final SmlCapabilities sosCapability : describedObject.getCapabilities()) {
            // check for observedBBOX, currently not supported, how to model
            // in SML 2.0?
            // Update Discovery Profile
            if (!SensorMLConstants.ELEMENT_NAME_OBSERVED_BBOX.equals(sosCapability.getName())) {
                final Capabilities c = createCapability(sosCapability);
                // replace existing capability with the same name
                if (c != null) {
                    if (names.contains(c.getName())) {
                        removeCapability(dot, c);
                    }
                    dot.addNewCapabilities().set(c);
                }
            }
        }
    }
    // set description
    if (describedObject.isSetDescription() && !dot.isSetDescription()) {
        dot.addNewDescription().setStringValue(describedObject.getDescription());
    }
    // set names
    if (describedObject.isSetName() && CollectionHelper.isNullOrEmpty(dot.getNameArray())) {
        // TODO check if override existing names
        addNamesToAbstractProcess(dot, describedObject.getNames());
    }
    // set location
    // set extension
    // set keywords
    if (describedObject.isSetKeywords()) {
        if (CollectionHelper.isNullOrEmpty(dot.getKeywordsArray())) {
            final List<String> keywords = describedObject.getKeywords();
            // final int length = dot.getKeywordsArray().length;
            // for (int i = 0; i < length; ++i) {
            // dot.removeKeywords(i);
            // }
            dot.addNewKeywords().addNewKeywordList()
                    .setKeywordArray(keywords.toArray(new String[keywords.size()]));
        } else {
            // TODO
        }
    }
    // set identification
    if (describedObject.isSetIdentifications()) {
        if (!CollectionHelper.isNullOrEmpty(dot.getIdentificationArray())) {
            // TODO check for merging identifications if exists
        }
        dot.setIdentificationArray(createIdentification(describedObject.getIdentifications()));
    }
    // set classification
    if (describedObject.isSetClassifications()) {
        dot.setClassificationArray(createClassification(describedObject.getClassifications()));
    }
    // set validTime
    if (describedObject.isSetValidTime() && CollectionHelper.isNullOrEmpty(dot.getValidTimeArray())) {
        final Time time = describedObject.getValidTime();
        final XmlObject xbtime = CodingHelper.encodeObjectToXml(GmlConstants.NS_GML_32, time);
        if (time instanceof TimeInstant) {
            dot.addNewValidTime().addNewTimeInstant().set(xbtime);
        } else if (time instanceof TimePeriod) {
            dot.addNewValidTime().addNewTimePeriod().set(xbtime);
        }
        // } else {
        // TODO remove or
        // remove existing validTime element
        // final XmlCursor newCursor = dot.getValidTime().newCursor();
        // newCursor.removeXml();
        // newCursor.dispose();

    }
    // set securityConstraints
    // set legalConstraints
    // set characteristics
    if (describedObject.isSetCharacteristics()) {
        dot.setCharacteristicsArray(createCharacteristics(describedObject.getCharacteristics()));
    }
    // set contacts if contacts aren't already present in the abstract
    // process
    // if (describedObject.isSetContact() &&
    // CollectionHelper.isNotNullOrEmpty(dot.getContactsArray())) {
    if (describedObject.isSetContact()) {
        ContactListType cl = ContactListType.Factory.newInstance();
        for (SmlContact contact : describedObject.getContact()) {
            if (contact instanceof SmlResponsibleParty) {
                XmlObject encodeObjectToXml = CodingHelper.encodeObjectToXml(GmdConstants.NS_GMD,
                        (SmlResponsibleParty) contact);
                if (encodeObjectToXml != null) {
                    cl.addNewContact().addNewCIResponsibleParty().set(encodeObjectToXml);
                }
            }
        }
        if (CollectionHelper.isNotNullOrEmpty(cl.getContactArray())) {
            dot.addNewContacts().setContactList(cl);
        }
    }
    // set documentation
    if (describedObject.isSetDocumentation()) {
        dot.setDocumentationArray(createDocumentationArray(describedObject.getDocumentation()));
    }
    // set history

}

From source file:com.google.gerrit.server.git.MergeUtil.java

public Set<Change.Id> findUnmergedChanges(Set<Change.Id> expected, CodeReviewRevWalk rw, RevFlag canMergeFlag,
        CodeReviewCommit oldTip, CodeReviewCommit mergeTip, Iterable<Change.Id> alreadyMerged)
        throws IntegrationException {
    if (mergeTip == null) {
        return expected;
    }

    try {
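        // ids confirmed as merged; pre-sized to the number of changes we expect to find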
        Set<Change.Id> found = Sets.newHashSetWithExpectedSize(expected.size());
        Iterables.addAll(found, alreadyMerged);
        rw.resetRetain(canMergeFlag);
        rw.sort(RevSort.TOPO);
        rw.markStart(mergeTip);
        if (oldTip != null) {
            rw.markUninteresting(oldTip);
        }

        CodeReviewCommit c;
        while ((c = rw.next()) != null) {
            if (c.getPatchsetId() == null) {
                continue;
            }
            Change.Id id = c.getPatchsetId().getParentKey();
            if (!expected.contains(id)) {
                continue;
            }
            found.add(id);
            if (found.size() == expected.size()) {
                return Collections.emptySet();
            }
        }
        return Sets.difference(expected, found);
    } catch (IOException e) {
        throw new IntegrationException("Cannot check if changes were merged", e);
    }
}

From source file:com.opengamma.financial.analytics.model.curve.interestrate.MarketInstrumentImpliedYieldCurveFunction.java

private Set<ComputedValue> execute(final FunctionExecutionContext executionContext,
        final ComputationTargetSpecification targetSpec, final String curveName,
        final InterpolatedYieldCurveSpecificationWithSecurities specificationWithSecurities,
        final SnapshotDataBundle marketData, final HistoricalTimeSeriesBundle timeSeries,
        final boolean createYieldCurve, final boolean createJacobian, final boolean createSensitivities) {
    final Clock snapshotClock = executionContext.getValuationClock();
    final ZonedDateTime now = ZonedDateTime.now(snapshotClock);
    final List<InstrumentDerivative> derivatives = new ArrayList<>();
    final int n = specificationWithSecurities.getStrips().size();
    final double[] initialRatesGuess = new double[n];
    final double[] nodeTimes = new double[n];
    final double[] marketValues = new double[n];
    int i = 0;
    for (final FixedIncomeStripWithSecurity strip : specificationWithSecurities.getStrips()) {
        Double marketValue = marketData.getDataPoint(strip.getSecurityIdentifier());
        if (marketValue == null) {
            throw new NullPointerException("Could not get market data for " + strip);
        }
        InstrumentDerivative derivative;

        final FinancialSecurity financialSecurity = (FinancialSecurity) strip.getSecurity();
        final String[] curveNames = FixedIncomeInstrumentCurveExposureHelper
                .getCurveNamesForFundingCurveInstrument(strip.getInstrumentType(), curveName, curveName);

        final InstrumentDefinition<?> definition = getSecurityConverter().visit(financialSecurity);
        if (strip.getSecurity().getSecurityType().equals("FUTURE")) {
            marketValue = 1 - marketValue; // transform to rate for initial rates guess
        }
        try {
            derivative = getDefinitionConverter().convert(financialSecurity, definition, now, curveNames,
                    timeSeries);
        } catch (final OpenGammaRuntimeException ogre) {
            s_logger.error(
                    "Error thrown by convertor for security {}, definition {}, time {}, curveNames {}, dataSource {}",
                    new Object[] { financialSecurity, definition, now, curveNames, timeSeries });
            throw ogre;
        }
        if (derivative == null) {
            throw new NullPointerException("Had a null InterestRateDefinition for " + strip);
        }
        if (_calcTypeParRate) {
            marketValues[i] = marketValue;
        }
        derivatives.add(derivative);
        initialRatesGuess[i] = marketValue;
        nodeTimes[i] = derivative.accept(LAST_DATE_CALCULATOR);
        i++;
    }
    ParallelArrayBinarySort.parallelBinarySort(nodeTimes, initialRatesGuess);
    final LinkedHashMap<String, double[]> curveKnots = new LinkedHashMap<>();
    curveKnots.put(curveName, nodeTimes);
    final LinkedHashMap<String, double[]> curveNodes = new LinkedHashMap<>();
    final LinkedHashMap<String, Interpolator1D> interpolators = new LinkedHashMap<>();
    curveNodes.put(curveName, nodeTimes);
    interpolators.put(curveName, getInterpolator(specificationWithSecurities));
    // TODO have use finite difference or not as an input [FIN-147]
    final Currency currency = Currency.of(targetSpec.getUniqueId().getValue());
    final MultipleYieldCurveFinderDataBundle data = new MultipleYieldCurveFinderDataBundle(derivatives,
            marketValues, null, curveNodes, interpolators, false, new FXMatrix(currency));
    final Function1D<DoubleMatrix1D, DoubleMatrix1D> curveCalculator = new MultipleYieldCurveFinderFunction(
            data, getCalculator());
    final Function1D<DoubleMatrix1D, DoubleMatrix2D> jacobianCalculator = new MultipleYieldCurveFinderJacobian(
            data, getSensitivityCalculator());
    NewtonVectorRootFinder rootFinder;
    double[] yields = null;
    try {
        // TODO have the decomposition as an optional input [FIN-146]
        rootFinder = new BroydenVectorRootFinder(1e-7, 1e-7, 100,
                DecompositionFactory.getDecomposition(DecompositionFactory.SV_COLT_NAME));
        final DoubleMatrix1D result = rootFinder.getRoot(curveCalculator, jacobianCalculator,
                new DoubleMatrix1D(initialRatesGuess));
        yields = result.getData();
    } catch (final Exception eLU) {
        try {
            s_logger.warn("Could not find root using LU decomposition and present value method for curve "
                    + curveName + "; trying SV. Error was: " + eLU.getMessage());
            rootFinder = new BroydenVectorRootFinder(1e-7, 1e-7, 100,
                    DecompositionFactory.getDecomposition(DecompositionFactory.SV_COMMONS_NAME));
            yields = rootFinder
                    .getRoot(curveCalculator, jacobianCalculator, new DoubleMatrix1D(initialRatesGuess))
                    .getData();
        } catch (final Exception eSV) {
            s_logger.warn("Could not find root using SV decomposition and present value method for curve "
                    + curveName + ". Error was: " + eSV.getMessage());
            throw new OpenGammaRuntimeException(eSV.getMessage());
        }
    }
    final YieldAndDiscountCurve curve;
    if (createSensitivities || createYieldCurve) {
        curve = YieldCurve.from(
                InterpolatedDoublesCurve.from(nodeTimes, yields, getInterpolator(specificationWithSecurities)));
    } else {
        curve = null;
    }
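    // at most a handful of values (Jacobian, coupon sensitivities, yield curve) are added below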
    final Set<ComputedValue> result = Sets.newHashSetWithExpectedSize(4);
    final ValueProperties.Builder properties = createValueProperties()
            .with(ValuePropertyNames.CURVE_CALCULATION_METHOD, getCalculationType())
            .with(YieldCurveFunction.PROPERTY_FORWARD_CURVE, curveName)
            .with(YieldCurveFunction.PROPERTY_FUNDING_CURVE, curveName);
    if (createJacobian) {
        final DoubleMatrix2D jacobianMatrix = jacobianCalculator.evaluate(new DoubleMatrix1D(yields));
        result.add(new ComputedValue(new ValueSpecification(ValueRequirementNames.YIELD_CURVE_JACOBIAN,
                targetSpec, properties.get()), jacobianMatrix.getData()));
    }
    if (createSensitivities) {
        final double[] couponSensitivities = new double[derivatives.size()];
        int ii = 0;
        final String[] curveNames = new String[] { curveName, curveName };
        final YieldAndDiscountCurve[] curves = new YieldAndDiscountCurve[] { curve, curve };
        final YieldCurveBundle curveBundle = new YieldCurveBundle(curveNames, curves);
        for (final InstrumentDerivative derivative : derivatives) {
            couponSensitivities[ii++] = derivative.accept(getCouponSensitivityCalculator(), curveBundle);
        }
        result.add(
                new ComputedValue(new ValueSpecification(ValueRequirementNames.PRESENT_VALUE_COUPON_SENSITIVITY,
                        targetSpec, properties.get()), new DoubleMatrix1D(couponSensitivities)));
    }
    if (createYieldCurve) {
        result.add(new ComputedValue(new ValueSpecification(ValueRequirementNames.YIELD_CURVE, targetSpec,
                properties.with(ValuePropertyNames.CURVE, curveName).get()), curve));
    }
    return result;
}

From source file:com.opengamma.engine.calcnode.SimpleCalculationNode.java

private static Set<ValueRequirement> plat2290(final ValueSpecification[] outputs) {
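    // one requirement per output specification, so the set is sized to outputs.length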
    final Set<ValueRequirement> result = Sets.newHashSetWithExpectedSize(outputs.length);
    for (final ValueSpecification output : outputs) {
        result.add(output.toRequirementSpecification());
    }
    return result;
}

From source file:edu.buaa.satla.analysis.util.VariableClassification.java

/** This function handles a declaration with an optional initializer.
 * Only simple types are handled. */
private void handleDeclarationEdge(final CDeclarationEdge edge) {
    CDeclaration declaration = edge.getDeclaration();
    if (!(declaration instanceof CVariableDeclaration)) {
        return;
    }

    CVariableDeclaration vdecl = (CVariableDeclaration) declaration;
    String varName = vdecl.getQualifiedName();

    // "connect" the edge with its partition
    Set<String> var = Sets.newHashSetWithExpectedSize(1);
    var.add(varName);
    dependencies.addAll(var, new HashSet<BigInteger>(), edge, 0);

    // only simple types (int, long) are allowed for booleans, ...
    if (!(vdecl.getType() instanceof CSimpleType)) {
        nonIntBoolVars.add(varName);
        nonIntEqVars.add(varName);
        nonIntAddVars.add(varName);
    }

    final CInitializer initializer = vdecl.getInitializer();
    List<CExpressionAssignmentStatement> l;

    try {
        l = CInitializers.convertToAssignments(vdecl, edge);
    } catch (UnrecognizedCCodeException should_not_happen) {
        throw new AssertionError(should_not_happen);
    }

    for (CExpressionAssignmentStatement init : l) {
        final CLeftHandSide lhsExpression = init.getLeftHandSide();
        final VariableOrField lhs = lhsExpression.accept(collectingLHSVisitor);

        final CExpression rhs = init.getRightHandSide();
        rhs.accept(new CollectingRHSVisitor(lhs));
    }

    if ((initializer == null) || !(initializer instanceof CInitializerExpression)) {
        return;
    }

    CExpression exp = ((CInitializerExpression) initializer).getExpression();
    if (exp == null) {
        return;
    }

    handleExpression(edge, exp, varName, VariableOrField.newVariable(varName));
}

From source file:org.sosy_lab.cpachecker.util.VariableClassificationBuilder.java

/** evaluates an expression and adds containing vars to the sets.
 * the id is the position of the expression in the edge,
 * it is 0 for all edges except a FunctionCallEdge. */
private void handleExpression(CFAEdge edge, CExpression exp, String varName, int id,
        final VariableOrField lhs) {
    CFANode pre = edge.getPredecessor();

    VariablesCollectingVisitor dcv = new VariablesCollectingVisitor(pre);
    Set<String> vars = exp.accept(dcv);
    if (vars == null) {
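        // the expression contained no variables; use a set sized for the single varName added below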
        vars = Sets.newHashSetWithExpectedSize(1);
    }

    vars.add(varName);
    allVars.addAll(vars);
    dependencies.addAll(vars, dcv.getValues(), edge, id);

    BoolCollectingVisitor bcv = new BoolCollectingVisitor(pre);
    Set<String> possibleBoolean = exp.accept(bcv);
    handleResult(varName, possibleBoolean, nonIntBoolVars);

    IntEqualCollectingVisitor ncv = new IntEqualCollectingVisitor(pre);
    Set<String> possibleIntEqualVars = exp.accept(ncv);
    handleResult(varName, possibleIntEqualVars, nonIntEqVars);

    IntAddCollectingVisitor icv = new IntAddCollectingVisitor(pre);
    Set<String> possibleIntAddVars = exp.accept(icv);
    handleResult(varName, possibleIntAddVars, nonIntAddVars);

    exp.accept(new CollectingRHSVisitor(lhs));
}

From source file:com.opengamma.engine.calcnode.SimpleCalculationNode.java

private void invoke(final CalculationJobItem jobItem, final DeferredInvocationStatistics statistics,
        final CalculationJobResultItemBuilder resultItemBuilder) throws AsynchronousExecution {
    final ValueSpecification[] outputs = jobItem.getOutputs();
    final String functionUniqueId = jobItem.getFunctionUniqueIdentifier();
    Future<ComputationTarget> targetFuture = null;
    ComputationTarget target = null;
    if (isUseAsynchronousTargetResolve()) {
        targetFuture = getExecutorService().submit(new Callable<ComputationTarget>() {
            @Override
            public ComputationTarget call() {
                return getTargetResolver().resolve(jobItem.getComputationTargetSpecification());
            }
        });
    } else {
        target = LazyComputationTargetResolver.resolve(getTargetResolver(),
                jobItem.getComputationTargetSpecification());
    }
    final FunctionInvoker invoker = getFunctions().getInvoker(functionUniqueId);
    if (invoker == null) {
        postEvaluationErrors(outputs, MissingOutput.EVALUATION_ERROR);
        resultItemBuilder.withException(ERROR_BAD_FUNCTION,
                "Unable to locate " + functionUniqueId + " in function repository");
        return;
    }
    // set parameters
    getFunctionExecutionContext().setFunctionParameters(jobItem.getFunctionParameters());
    // assemble inputs
    final ValueSpecification[] inputValueSpecs = jobItem.getInputs();
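    // pre-sized for the worst case in which every input value is missing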
    final Set<ValueSpecification> missing = Sets.newHashSetWithExpectedSize(inputValueSpecs.length);
    if (!isUseAsynchronousTargetResolve() && (target == null)) {
        if (invoker.canHandleMissingInputs()) {
            // A missing target is just a special case of missing input
            missing.add(
                    TargetSourcingFunction.createSpecification(jobItem.getComputationTargetSpecification()));
        } else {
            postEvaluationErrors(outputs, MissingOutput.EVALUATION_ERROR);
            resultItemBuilder.withException(ERROR_CANT_RESOLVE,
                    "Unable to resolve target " + jobItem.getComputationTargetSpecification());
            return;
        }
    }
    final Collection<ComputedValue> inputs = new ArrayList<ComputedValue>(inputValueSpecs.length);
    int inputBytes = 0;
    int inputSamples = 0;
    final DeferredViewComputationCache cache = getCache();
    _inputs._inputs = inputValueSpecs;
    for (final Pair<ValueSpecification, Object> input : cache.getValues(_inputs,
            getJob().getCacheSelectHint())) {
        if ((input.getSecond() == null) || (input.getSecond() instanceof MissingValue)) {
            missing.add(input.getFirst());
        } else {
            final ComputedValue value = new ComputedValue(input.getFirst(), input.getSecond());
            inputs.add(value);
            final Integer bytes = cache.estimateValueSize(value);
            if (bytes != null) {
                inputBytes += bytes;
                inputSamples++;
            }
        }
    }
    statistics.setDataInputBytes(inputBytes, inputSamples);
    if (!missing.isEmpty()) {
        if (invoker.canHandleMissingInputs()) {
            s_logger.debug("Executing even with missing inputs {}", missing);
            resultItemBuilder.withPartialInputs(missing);
        } else {
            s_logger.info("Not able to execute as missing inputs {}", missing);
            if (targetFuture != null) {
                // Cancelling doesn't do anything so we have to block and clear the result
                try {
                    targetFuture.get();
                } catch (final Throwable t) {
                    s_logger.warn("Error resolving target", t);
                    postEvaluationErrors(outputs, MissingOutput.EVALUATION_ERROR);
                    resultItemBuilder.withException(t);
                    return;
                }
            }
            postEvaluationErrors(jobItem.getOutputs(), MissingOutput.MISSING_INPUTS);
            resultItemBuilder.withMissingInputs(missing);
            return;
        }
    }
    final FunctionInputs functionInputs = new FunctionInputsImpl(getTargetResolver().getSpecificationResolver(),
            inputs, missing);
    if (target == null) {
        try {
            target = targetFuture.get();
        } catch (final Throwable t) {
            s_logger.warn("Error resolving target", t);
            postEvaluationErrors(outputs, MissingOutput.EVALUATION_ERROR);
            resultItemBuilder.withException(t);
            return;
        }
        if (target == null) {
            if (invoker.canHandleMissingInputs()) {
                // A missing target is just a special case of missing input
                missing.add(new ValueSpecification(ValueRequirementNames.TARGET,
                        jobItem.getComputationTargetSpecification(),
                        ValueProperties.with(ValuePropertyNames.FUNCTION, "TargetSourcingFunction").get()));
            } else {
                postEvaluationErrors(outputs, MissingOutput.EVALUATION_ERROR);
                resultItemBuilder.withException(ERROR_CANT_RESOLVE,
                        "Unable to resolve target " + jobItem.getComputationTargetSpecification());
                return;
            }
        }
    }
    // Execute
    statistics.beginInvocation();
    Set<ComputedValue> result;
    try {
        result = invoker.execute(getFunctionExecutionContext(), functionInputs, target, plat2290(outputs));
    } catch (final AsynchronousExecution e) {
        s_logger.debug("Asynchronous execution of {} at {}", jobItem, _nodeId);
        final AsynchronousOperation<Deferred<Void>> async = deferredOperation();
        e.setResultListener(new ResultListener<Set<ComputedValue>>() {
            @Override
            public void operationComplete(final AsynchronousResult<Set<ComputedValue>> result) {
                s_logger.debug("Job item {} result available", jobItem);
                async.getCallback().setResult(new Deferred<Void>() {
                    @Override
                    public Void call(final SimpleCalculationNode self) {
                        s_logger.debug("Asynchronous result for {} at {}", jobItem, self._nodeId);
                        Set<ComputedValue> results;
                        try {
                            results = result.getResult();
                        } catch (final FunctionBlacklistedException e) {
                            self.invocationBlacklisted(jobItem, resultItemBuilder);
                            return null;
                        } catch (final Throwable t) {
                            self.invokeException(outputs, t, resultItemBuilder);
                            return null;
                        }
                        self.invokeResult(invoker, statistics, missing, outputs, results, resultItemBuilder);
                        return null;
                    }
                });
            }
        });
        async.getResult().call(this);
        return;
    } catch (final FunctionBlacklistedException e) {
        invocationBlacklisted(jobItem, resultItemBuilder);
        return;
    } catch (final Throwable t) {
        invokeException(outputs, t, resultItemBuilder);
        return;
    }
    invokeResult(invoker, statistics, missing, outputs, result, resultItemBuilder);
}