Example usage for org.apache.commons.lang3.tuple MutablePair setRight

List of usage examples for org.apache.commons.lang3.tuple MutablePair setRight

Introduction

On this page you can find example usages of org.apache.commons.lang3.tuple.MutablePair#setRight.

Prototype

public void setRight(final R right) 

Source Link

Document

Sets the right element of the pair.

Usage

From source file:org.apache.apex.malhar.lib.window.accumulation.Average.java

@Override
public MutablePair<Double, Long> accumulate(MutablePair<Double, Long> accu, Double input) {
    // Incremental mean: newAvg = oldAvg * n/(n+1) + input/(n+1).
    final long oldCount = accu.getRight();
    final long newCount = oldCount + 1;
    final double scaledOldAvg = accu.getLeft() * ((double) oldCount / newCount);
    accu.setLeft(scaledOldAvg + input / newCount);
    accu.setRight(newCount);
    return accu;
}

From source file:org.apache.apex.malhar.lib.window.accumulation.Average.java

@Override
public MutablePair<Double, Long> merge(MutablePair<Double, Long> accu1, MutablePair<Double, Long> accu2) {
    // Weighted mean of the two partial averages:
    //   merged = avg1 * n1/(n1+n2) + avg2 * n2/(n1+n2)
    // BUG FIX: the original lacked parentheses around the count sum, so
    // "(double) n1 / n1 + n2" evaluated as (n1/n1) + n2 (division binds
    // tighter than addition), producing a wrong merged average.
    final long totalCount = accu1.getRight() + accu2.getRight();
    accu1.setLeft(accu1.getLeft() * ((double) accu1.getRight() / totalCount)
            + accu2.getLeft() * ((double) accu2.getRight() / totalCount));
    accu1.setRight(totalCount);
    return accu1;
}

From source file:org.apache.giraph.block_app.reducers.array.ArrayReduce.java

/**
 * Registers one new reducer that will reduce an array of objects by
 * reducing individual elements using {@code elementReduceOp}.
 *
 * This function will return a ReducerArrayHandle through which
 * individual elements can be manipulated separately.
 *
 * @param fixedSize Number of elements
 * @param elementReduceOp ReduceOperation for individual elements
 * @param createFunction Function for creating a reducer
 * @return Created ReducerArrayHandle
 */
public static <S, T extends Writable> ReducerArrayHandle<S, T> createArrayHandles(final int fixedSize,
        ReduceOperation<S, T> elementReduceOp, CreateReducerFunctionApi createFunction) {
    // The underlying reducer operates on (index, value) pairs and produces
    // the whole reduced array as one ArrayWritable.
    final ReducerHandle<Pair<IntRef, S>, ArrayWritable<T>> reduceHandle = createFunction
            .createReducer(new ArrayReduce<>(fixedSize, elementReduceOp));

    // Shared mutable cursor: get(index) positions it, and the single
    // elementReduceHandle instance then reads/reduces that element.
    // NOTE(review): shared cursor implies the handles are not safe for
    // concurrent use — confirm callers are single-threaded here.
    final IntRef curIndex = new IntRef(0);
    // Reused on every reduce() call to avoid allocating a pair per element.
    final MutablePair<IntRef, S> reusablePair = MutablePair.of(new IntRef(0), null);
    final ReducerHandle<S, T> elementReduceHandle = new ReducerHandle<S, T>() {
        @Override
        public T getReducedValue(MasterGlobalCommUsage master) {
            ArrayWritable<T> result = reduceHandle.getReducedValue(master);
            return result.get()[curIndex.value];
        }

        @Override
        public void reduce(S valueToReduce) {
            // Tag the value with the currently selected index, then delegate
            // to the array-wide reducer.
            reusablePair.getLeft().value = curIndex.value;
            reusablePair.setRight(valueToReduce);
            reduceHandle.reduce(reusablePair);
        }

        @Override
        public BroadcastHandle<T> broadcastValue(BlockMasterApi master) {
            // Per-element broadcast is not supported; broadcast goes through
            // the array-level handle returned below.
            throw new UnsupportedOperationException();
        }
    };

    return new ReducerArrayHandle<S, T>() {
        @Override
        public ReducerHandle<S, T> get(int index) {
            // Reposition the shared cursor; the same handle object is
            // returned for every index.
            curIndex.value = index;
            return elementReduceHandle;
        }

        @Override
        public int getStaticSize() {
            return fixedSize;
        }

        @Override
        public int getReducedSize(BlockMasterApi master) {
            return getStaticSize();
        }

        @Override
        public BroadcastArrayHandle<T> broadcastValue(BlockMasterApi master) {
            final BroadcastHandle<ArrayWritable<T>> broadcastHandle = reduceHandle.broadcastValue(master);
            // Independent cursor for the broadcast-side handles (shadows the
            // reducer-side curIndex on purpose).
            final IntRef curIndex = new IntRef(0);
            final BroadcastHandle<T> elementBroadcastHandle = new BroadcastHandle<T>() {
                @Override
                public T getBroadcast(WorkerBroadcastUsage worker) {
                    ArrayWritable<T> result = broadcastHandle.getBroadcast(worker);
                    return result.get()[curIndex.value];
                }
            };
            return new BroadcastArrayHandle<T>() {
                @Override
                public BroadcastHandle<T> get(int index) {
                    curIndex.value = index;
                    return elementBroadcastHandle;
                }

                @Override
                public int getStaticSize() {
                    return fixedSize;
                }

                @Override
                public int getBroadcastedSize(WorkerBroadcastUsage worker) {
                    return getStaticSize();
                }
            };
        }
    };
}

From source file:org.apache.giraph.block_app.reducers.array.BasicArrayReduce.java

/**
 * Registers one new reducer, that will reduce BasicArray,
 * by reducing individual elements using {@code elementReduceOp},
 * with predefined size.
 *
 * This function will return ReducerArrayHandle, by which
 * individual elements can be manipulated separately.
 *
 * @param fixedSize Number of elements; {@code -1} means the size is not
 *                  specified upfront (getStaticSize then throws)
 * @param typeOps TypeOps of individual elements
 * @param elementReduceOp ReduceOperation for individual elements
 * @param createFunction Function for creating a reducer
 * @return Created ReducerArrayHandle
 */
public static <S, R extends Writable> ReducerArrayHandle<S, R> createArrayHandles(final int fixedSize,
        final PrimitiveTypeOps<R> typeOps, ReduceOperation<S, R> elementReduceOp,
        CreateReducerFunctionApi createFunction) {
    // The underlying reducer operates on (index, value) pairs and produces
    // the whole reduced array as one WArrayList.
    final ReducerHandle<Pair<IntRef, S>, WArrayList<R>> reduceHandle = createFunction
            .createReducer(new BasicArrayReduce<>(fixedSize, typeOps, elementReduceOp));
    // Shared mutable cursor selecting the element subsequent calls act on.
    final IntRef curIndex = new IntRef(0);
    // Reused output object so getReducedValue() never allocates.
    final R reusableValue = typeOps.create();
    final R initialValue = elementReduceOp.createInitialValue();
    // Reused on every reduce() call to avoid allocating a pair per element.
    final MutablePair<IntRef, S> reusablePair = MutablePair.of(new IntRef(0), null);
    final ReducerHandle<S, R> elementReduceHandle = new ReducerHandle<S, R>() {
        @Override
        public R getReducedValue(MasterGlobalCommUsage master) {
            WArrayList<R> result = reduceHandle.getReducedValue(master);
            // For variable-size arrays, an index past the current end reports
            // the element's neutral initial value instead of failing.
            if (fixedSize == -1 && curIndex.value >= result.size()) {
                typeOps.set(reusableValue, initialValue);
            } else {
                result.getIntoW(curIndex.value, reusableValue);
            }
            return reusableValue;
        }

        @Override
        public void reduce(S valueToReduce) {
            // Tag the value with the currently selected index, then delegate.
            reusablePair.getLeft().value = curIndex.value;
            reusablePair.setRight(valueToReduce);
            reduceHandle.reduce(reusablePair);
        }

        @Override
        public BroadcastHandle<R> broadcastValue(BlockMasterApi master) {
            // Per-element broadcast is not supported; broadcast goes through
            // the array-level handle returned below.
            throw new UnsupportedOperationException();
        }
    };

    return new ReducerArrayHandle<S, R>() {
        @Override
        public ReducerHandle<S, R> get(int index) {
            // Reposition the shared cursor; the same handle object is
            // returned for every index.
            curIndex.value = index;
            return elementReduceHandle;
        }

        @Override
        public int getStaticSize() {
            if (fixedSize == -1) {
                throw new UnsupportedOperationException("Cannot call size, when one is not specified upfront");
            }
            return fixedSize;
        }

        @Override
        public int getReducedSize(BlockMasterApi master) {
            return reduceHandle.getReducedValue(master).size();
        }

        @Override
        public BroadcastArrayHandle<R> broadcastValue(BlockMasterApi master) {
            final BroadcastHandle<WArrayList<R>> broadcastHandle = reduceHandle.broadcastValue(master);
            // Independent cursor and reusable value for the broadcast side.
            final IntRef curIndex = new IntRef(0);
            final R reusableValue = typeOps.create();
            final BroadcastHandle<R> elementBroadcastHandle = new BroadcastHandle<R>() {
                @Override
                public R getBroadcast(WorkerBroadcastUsage worker) {
                    WArrayList<R> result = broadcastHandle.getBroadcast(worker);
                    // Same out-of-range fallback as on the reducer side.
                    if (fixedSize == -1 && curIndex.value >= result.size()) {
                        typeOps.set(reusableValue, initialValue);
                    } else {
                        result.getIntoW(curIndex.value, reusableValue);
                    }
                    return reusableValue;
                }
            };
            return new BroadcastArrayHandle<R>() {
                @Override
                public BroadcastHandle<R> get(int index) {
                    curIndex.value = index;
                    return elementBroadcastHandle;
                }

                @Override
                public int getStaticSize() {
                    if (fixedSize == -1) {
                        throw new UnsupportedOperationException(
                                "Cannot call size, when one is not specified upfront");
                    }
                    return fixedSize;
                }

                @Override
                public int getBroadcastedSize(WorkerBroadcastUsage worker) {
                    return broadcastHandle.getBroadcast(worker).size();
                }
            };
        }
    };
}

From source file:org.apache.giraph.block_app.reducers.map.BasicMapReduce.java

/**
 * Registers one new reducer, that will reduce BasicMap,
 * by reducing individual elements corresponding to the same key
 * using {@code elementReduceOp}.
 *
 * This function will return ReducerMapHandle, by which
 * individual elements can be manipulated separately.
 *
 * @param keyTypeOps TypeOps of keys
 * @param typeOps TypeOps of individual elements
 * @param elementReduceOp ReduceOperation for individual elements
 * @param createFunction Function for creating a reducer
 * @return Created ReducerMapHandle
 */
public static <K extends WritableComparable, S, R extends Writable> ReducerMapHandle<K, S, R> createMapHandles(
        final PrimitiveIdTypeOps<K> keyTypeOps, final PrimitiveTypeOps<R> typeOps,
        ReduceOperation<S, R> elementReduceOp, CreateReducerFunctionApi createFunction) {
    // The underlying reducer operates on (key, value) pairs and produces the
    // whole reduced map as one Basic2ObjectMap.
    final ReducerHandle<Pair<K, S>, Basic2ObjectMap<K, R>> reduceHandle = createFunction
            .createReducer(new BasicMapReduce<>(keyTypeOps, typeOps, elementReduceOp));
    // Shared mutable key cursor: get(key) copies the key in here, and the
    // single elementReduceHandle instance then acts on it.
    final K curIndex = keyTypeOps.create();
    // Reused output object so getReducedValue() never allocates.
    final R reusableValue = typeOps.create();
    final R initialValue = elementReduceOp.createInitialValue();
    // Reused on every reduce() call to avoid allocating a pair per element.
    final MutablePair<K, S> reusablePair = MutablePair.of(null, null);
    final ReducerHandle<S, R> elementReduceHandle = new ReducerHandle<S, R>() {
        @Override
        public R getReducedValue(MasterGlobalCommUsage master) {
            Basic2ObjectMap<K, R> result = reduceHandle.getReducedValue(master);
            R value = result.get(curIndex);
            if (value == null) {
                // Key was never reduced into: report the neutral initial value.
                typeOps.set(reusableValue, initialValue);
            } else {
                typeOps.set(reusableValue, value);
            }
            return reusableValue;
        }

        @Override
        public void reduce(S valueToReduce) {
            // Pair the value with the currently selected key, then delegate.
            reusablePair.setLeft(curIndex);
            reusablePair.setRight(valueToReduce);
            reduceHandle.reduce(reusablePair);
        }

        @Override
        public BroadcastHandle<R> broadcastValue(BlockMasterApi master) {
            // Per-element broadcast is not supported; broadcast goes through
            // the map-level handle returned below.
            throw new UnsupportedOperationException();
        }
    };

    return new ReducerMapHandle<K, S, R>() {
        @Override
        public ReducerHandle<S, R> get(K key) {
            // Copy the key into the shared cursor; the same handle object is
            // returned for every key.
            keyTypeOps.set(curIndex, key);
            return elementReduceHandle;
        }

        @Override
        public int getReducedSize(BlockMasterApi master) {
            return reduceHandle.getReducedValue(master).size();
        }

        @Override
        public BroadcastMapHandle<K, R> broadcastValue(BlockMasterApi master) {
            final BroadcastHandle<Basic2ObjectMap<K, R>> broadcastHandle = reduceHandle.broadcastValue(master);
            // Independent cursor and reusable value for the broadcast side.
            final K curIndex = keyTypeOps.create();
            final R reusableValue = typeOps.create();
            final BroadcastHandle<R> elementBroadcastHandle = new BroadcastHandle<R>() {
                @Override
                public R getBroadcast(WorkerBroadcastUsage worker) {
                    Basic2ObjectMap<K, R> result = broadcastHandle.getBroadcast(worker);
                    R value = result.get(curIndex);
                    // Same missing-key fallback as on the reducer side.
                    if (value == null) {
                        typeOps.set(reusableValue, initialValue);
                    } else {
                        typeOps.set(reusableValue, value);
                    }
                    return reusableValue;
                }
            };
            return new BroadcastMapHandle<K, R>() {
                @Override
                public BroadcastHandle<R> get(K key) {
                    keyTypeOps.set(curIndex, key);
                    return elementBroadcastHandle;
                }

                @Override
                public int getBroadcastedSize(WorkerBroadcastUsage worker) {
                    return broadcastHandle.getBroadcast(worker).size();
                }
            };
        }
    };
}

From source file:org.apache.giraph.comm.flow_control.CreditBasedFlowControl.java

/**
 * Handles an ACK received for a previously sent request, updating the
 * per-worker open-request semaphore and the sender's credit.
 */
@Override
public void messageAckReceived(int taskId, long requestId, int response) {
    // Decode the flow-control info piggy-backed on the ACK response.
    boolean ignoreCredit = shouldIgnoreCredit(response);
    short credit = getCredit(response);
    int timestamp = getTimestamp(response);
    // Pair = (semaphore limiting open requests, last applied credit timestamp).
    MutablePair<AdjustableSemaphore, Integer> pair = (MutablePair<AdjustableSemaphore, Integer>) perWorkerOpenRequestMap
            .get(taskId);
    AdjustableSemaphore openRequestPermit = pair.getLeft();
    // Release a permit on open requests if we received ACK of a request other
    // than a Resume request (resume requests are always sent regardless of
    // number of open requests)
    if (!resumeRequestsId.get(taskId).remove(requestId)) {
        openRequestPermit.release();
    } else if (LOG.isDebugEnabled()) {
        LOG.debug("messageAckReceived: ACK of resume received from " + taskId + " timestamp=" + timestamp);
    }
    if (!ignoreCredit) {
        synchronized (pair) {
            // Apply the credit only if it is newer than the one already
            // recorded; ACKs may arrive out of order.
            if (compareTimestamps(timestamp, pair.getRight()) > 0) {
                pair.setRight(timestamp);
                openRequestPermit.setMaxPermits(credit);
            } else if (LOG.isDebugEnabled()) {
                LOG.debug("messageAckReceived: received out-of-order messages." + "Received timestamp="
                        + timestamp + " and current timestamp=" + pair.getRight());
            }
        }
    }
    // Since we received a response and we changed the credit of the sender
    // client, we may be able to send some more requests to the sender
    // client. So, we try to send as much request as we can to the sender
    // client.
    trySendCachedRequests(taskId);
}

From source file:org.apache.giraph.comm.flow_control.CreditBasedFlowControl.java

/**
 * Process a resume signal that came from a given worker.
 *
 * @param clientId id of the worker that sent the signal
 * @param credit the credit value sent along with the resume signal
 * @param requestId timestamp (request id) of the resume signal
 */
public void processResumeSignal(int clientId, short credit, long requestId) {
    // The low 16 bits of the request id carry the flow-control timestamp.
    int timestamp = (int) (requestId & 0xFFFF);
    if (LOG.isDebugEnabled()) {
        LOG.debug("processResumeSignal: resume signal from " + clientId + " with timestamp=" + timestamp);
    }
    // Pair = (semaphore limiting open requests, last applied credit timestamp).
    MutablePair<AdjustableSemaphore, Integer> pair = (MutablePair<AdjustableSemaphore, Integer>) perWorkerOpenRequestMap
            .get(clientId);
    synchronized (pair) {
        // Apply the new credit only if the signal is newer than the one
        // already recorded; signals may arrive out of order.
        if (compareTimestamps(timestamp, pair.getRight()) > 0) {
            pair.setRight(timestamp);
            pair.getLeft().setMaxPermits(credit);
        } else if (LOG.isDebugEnabled()) {
            LOG.debug("processResumeSignal: received out-of-order messages. " + "Received timestamp="
                    + timestamp + " and current timestamp=" + pair.getRight());
        }
    }
    // Credit may have increased, so try to flush any cached requests.
    trySendCachedRequests(clientId);
}

From source file:org.apache.hyracks.storage.am.common.TreeIndexTestUtils.java

/**
 * Duplicates the current tuple's PK field as a filter field and folds it
 * into the running (min, max) filter-tuple pair.
 *
 * NOTE(review): when {@code minMax} is null, the reassignment below only
 * changes the local parameter — Java passes references by value, so the
 * caller never sees the newly created pair. Confirm callers always pass a
 * non-null pair, or this branch silently drops the result.
 *
 * @param ctx test context supplying the current tuple and comparator factories
 * @param minMax running minimum/maximum filter tuples, updated in place
 * @throws HyracksDataException on tuple-building failure
 */
protected void addFilterField(IIndexTestContext ctx, MutablePair<ITupleReference, ITupleReference> minMax)
        throws HyracksDataException {
    //Duplicate the PK field as a filter field at the end of the tuple to be inserted.
    int filterField = ctx.getFieldCount();
    ITupleReference currTuple = ctx.getTuple();
    ArrayTupleBuilder filterBuilder = new ArrayTupleBuilder(1);
    filterBuilder.addField(currTuple.getFieldData(filterField), currTuple.getFieldStart(filterField),
            currTuple.getFieldLength(filterField));
    IBinaryComparator comparator = ctx.getComparatorFactories()[0].createBinaryComparator();
    ArrayTupleReference filterOnlyTuple = new ArrayTupleReference();
    filterOnlyTuple.reset(filterBuilder.getFieldEndOffsets(), filterBuilder.getByteArray());
    if (minMax == null) {
        // Both ends start at the same tuple (see NOTE above about visibility).
        minMax = MutablePair.of(filterOnlyTuple, filterOnlyTuple);
    } else if (compareFilterTuples(minMax.getLeft(), filterOnlyTuple, comparator) > 0) {
        // New tuple is smaller than the current minimum.
        minMax.setLeft(filterOnlyTuple);
    } else if (compareFilterTuples(minMax.getRight(), filterOnlyTuple, comparator) < 0) {
        // New tuple is larger than the current maximum.
        minMax.setRight(filterOnlyTuple);
    }
}

From source file:org.apache.hyracks.storage.am.lsm.btree.LSMBTreeFilterMergeTestDriver.java

/**
 * Exercises LSM BTree component filters across flushes and merges: inserts
 * batches, flushes them into separate components, verifies each component's
 * filter (min, max), then merges all components and checks that the merged
 * filter equals the min/max aggregated over the flushed components.
 */
@Override
protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
        ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey,
        ITupleReference prefixHighKey) throws Exception {
    OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType, true);
    ctx.getIndex().create();
    ctx.getIndex().activate();
    // Start off with one tree bulk loaded.
    // We assume all fieldSerdes are of the same type. Check the first one
    // to determine which field types to generate.
    if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
        orderedIndexTestUtils.bulkLoadIntTuples(ctx, numTuplesToInsert, true, getRandom());
    } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
        orderedIndexTestUtils.bulkLoadStringTuples(ctx, numTuplesToInsert, true, getRandom());
    }

    int maxTreesToMerge = AccessMethodTestsConfig.LSM_BTREE_MAX_TREES_TO_MERGE;
    ILSMIndexAccessor accessor = (ILSMIndexAccessor) ctx.getIndexAccessor();
    IBinaryComparator comp = ctx.getComparatorFactories()[0].createBinaryComparator();
    for (int i = 0; i < maxTreesToMerge; i++) {
        int flushed = 0;
        // Produce i flushed components, checking the filter after each batch.
        for (; flushed < i; flushed++) {
            Pair<ITupleReference, ITupleReference> minMax = null;
            if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
                minMax = orderedIndexTestUtils.insertIntTuples(ctx, numTuplesToInsert, true, getRandom());
            } else {
                minMax = orderedIndexTestUtils.insertStringTuples(ctx, numTuplesToInsert, true, getRandom());
            }
            if (minMax != null) {
                // The in-memory component's filter must match the inserted range.
                ILSMComponentFilter f = ((LSMBTree) ctx.getIndex()).getCurrentMemoryComponent()
                        .getLSMComponentFilter();
                Pair<ITupleReference, ITupleReference> obsMinMax = filterToMinMax(f);
                Assert.assertEquals(0,
                        TreeIndexTestUtils.compareFilterTuples(obsMinMax.getLeft(), minMax.getLeft(), comp));
                Assert.assertEquals(0,
                        TreeIndexTestUtils.compareFilterTuples(obsMinMax.getRight(), minMax.getRight(), comp));
            }

            // Flush synchronously and re-check the filter on the new disk component.
            StubIOOperationCallback stub = new StubIOOperationCallback();
            BlockingIOOperationCallbackWrapper waiter = new BlockingIOOperationCallbackWrapper(stub);
            accessor.scheduleFlush(waiter);
            waiter.waitForIO();
            if (minMax != null) {
                Pair<ITupleReference, ITupleReference> obsMinMax = filterToMinMax(
                        stub.getLastNewComponent().getLSMComponentFilter());
                Assert.assertEquals(0,
                        TreeIndexTestUtils.compareFilterTuples(obsMinMax.getLeft(), minMax.getLeft(), comp));
                Assert.assertEquals(0,
                        TreeIndexTestUtils.compareFilterTuples(obsMinMax.getRight(), minMax.getRight(), comp));
            }
        }

        // Aggregate the expected merged filter over all flushed components.
        List<ILSMDiskComponent> flushedComponents = ((LSMBTree) ctx.getIndex()).getImmutableComponents();
        MutablePair<ITupleReference, ITupleReference> expectedMergeMinMax = null;
        for (ILSMDiskComponent f : flushedComponents) {
            Pair<ITupleReference, ITupleReference> componentMinMax = filterToMinMax(f.getLSMComponentFilter());
            if (expectedMergeMinMax == null) {
                expectedMergeMinMax = MutablePair.of(componentMinMax.getLeft(), componentMinMax.getRight());
            }
            if (TreeIndexTestUtils.compareFilterTuples(expectedMergeMinMax.getLeft(), componentMinMax.getLeft(),
                    comp) > 0) {
                expectedMergeMinMax.setLeft(componentMinMax.getLeft());
            }
            if (TreeIndexTestUtils.compareFilterTuples(expectedMergeMinMax.getRight(),
                    componentMinMax.getRight(), comp) < 0) {
                expectedMergeMinMax.setRight(componentMinMax.getRight());
            }
        }
        accessor.scheduleMerge(NoOpIOOperationCallback.INSTANCE,
                ((LSMBTree) ctx.getIndex()).getImmutableComponents());

        // After the merge, a single component remains; its filter must equal
        // the aggregated expectation.
        flushedComponents = ((LSMBTree) ctx.getIndex()).getImmutableComponents();
        Pair<ITupleReference, ITupleReference> mergedMinMax = filterToMinMax(
                flushedComponents.get(0).getLSMComponentFilter());
        Assert.assertEquals(0, TreeIndexTestUtils.compareFilterTuples(expectedMergeMinMax.getLeft(),
                mergedMinMax.getLeft(), comp));
        Assert.assertEquals(0, TreeIndexTestUtils.compareFilterTuples(expectedMergeMinMax.getRight(),
                mergedMinMax.getRight(), comp));

        // Full battery of search checks on the merged index.
        orderedIndexTestUtils.checkPointSearches(ctx);
        orderedIndexTestUtils.checkScan(ctx);
        orderedIndexTestUtils.checkDiskOrderScan(ctx);
        orderedIndexTestUtils.checkRangeSearch(ctx, lowKey, highKey, true, true);
        if (prefixLowKey != null && prefixHighKey != null) {
            orderedIndexTestUtils.checkRangeSearch(ctx, prefixLowKey, prefixHighKey, true, true);
        }
    }
    ctx.getIndex().deactivate();
    ctx.getIndex().destroy();
}

From source file:org.apache.pulsar.broker.service.Consumer.java

/**
 * Dispatch a list of entries to the consumer. <br/>
 * <b>It is also responsible to release entries data and recycle entries object.</b>
 *
 * @return a promise that can be use to track when all the data has been written into the socket
 */
public Pair<ChannelPromise, Integer> sendMessages(final List<Entry> entries) {
    final ChannelHandlerContext ctx = cnx.ctx();
    // Pair = (promise completed when the last entry is written to the socket,
    //         number of messages dispatched).
    final MutablePair<ChannelPromise, Integer> sentMessages = new MutablePair<ChannelPromise, Integer>();
    final ChannelPromise writePromise = ctx.newPromise();
    sentMessages.setLeft(writePromise);
    if (entries.isEmpty()) {
        // Nothing to write: complete the promise immediately with count 0.
        if (log.isDebugEnabled()) {
            log.debug("[{}] List of messages is empty, triggering write future immediately for consumerId {}",
                    subscription, consumerId);
        }
        writePromise.setSuccess();
        sentMessages.setRight(0);
        return sentMessages;
    }

    try {
        sentMessages.setRight(updatePermitsAndPendingAcks(entries));
    } catch (PulsarServerException pe) {
        log.warn("[{}] [{}] consumer doesn't support batch-message {}", subscription, consumerId,
                cnx.getRemoteEndpointProtocolVersion());

        subscription.markTopicWithBatchMessagePublished();
        sentMessages.setRight(0);
        // disconnect consumer: it will update dispatcher's availablePermits and resend pendingAck-messages of this
        // consumer to other consumer
        disconnect();
        return sentMessages;
    }

    // All writes happen on the channel's event loop.
    ctx.channel().eventLoop().execute(() -> {
        for (int i = 0; i < entries.size(); i++) {
            Entry entry = entries.get(i);
            PositionImpl pos = (PositionImpl) entry.getPosition();
            MessageIdData.Builder messageIdBuilder = MessageIdData.newBuilder();
            MessageIdData messageId = messageIdBuilder.setLedgerId(pos.getLedgerId())
                    .setEntryId(pos.getEntryId()).build();

            ByteBuf metadataAndPayload = entry.getDataBuffer();
            // increment ref-count of data and release at the end of process: so, we can get chance to call entry.release
            metadataAndPayload.retain();
            // skip checksum by incrementing reader-index if consumer-client doesn't support checksum verification
            if (cnx.getRemoteEndpointProtocolVersion() < ProtocolVersion.v6.getNumber()) {
                readChecksum(metadataAndPayload);
            }

            if (log.isDebugEnabled()) {
                log.debug("[{}] Sending message to consumerId {}, entry id {}", subscription, consumerId,
                        pos.getEntryId());
            }

            // We only want to pass the "real" promise on the last entry written
            ChannelPromise promise = ctx.voidPromise();
            if (i == (entries.size() - 1)) {
                promise = writePromise;
            }
            ctx.write(Commands.newMessage(consumerId, messageId, metadataAndPayload), promise);
            // Recycle protobuf objects and release the entry after the write
            // has been queued.
            messageId.recycle();
            messageIdBuilder.recycle();
            entry.release();
        }

        ctx.flush();
    });

    return sentMessages;
}