Example usage for com.google.common.collect Range lowerEndpoint

Introduction

On this page you can find example usage for com.google.common.collect Range lowerEndpoint.

Prototype

public C lowerEndpoint() 

Document

Returns the lower endpoint of this range.
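
A minimal, self-contained illustration (not taken from the examples below): lowerEndpoint() is only defined for ranges that actually have a lower bound, so hasLowerBound() is the usual guard, and calling it on a range with no lower bound throws IllegalStateException.

import com.google.common.collect.Range;

public class LowerEndpointExample {
    public static void main(String[] args) {
        Range<Integer> bounded = Range.closedOpen(5, 10);  // [5..10)
        System.out.println(bounded.lowerEndpoint());       // prints 5

        Range<Integer> unbounded = Range.atMost(10);       // no lower bound
        if (unbounded.hasLowerBound()) {
            System.out.println(unbounded.lowerEndpoint());
        } else {
            // lowerEndpoint() would throw IllegalStateException here
            System.out.println("no lower endpoint");
        }
    }
}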

Usage

From source file:net.sf.mzmine.modules.peaklistmethods.peakpicking.deconvolution.centwave.CentWaveDetector.java

/**
 * Do peak picking using xcms::findPeaks.centWave.
 *
 * @param scanTime
 *            retention times (for each scan).
 * @param intensity
 *            intensity values (for each scan).
 * @param mz
 *            fixed m/z value for EIC.
 * @param snrThreshold
 *            signal:noise ratio threshold.
 * @param peakWidth
 *            peak width range.
 * @param integrationMethod
 *            integration method.
 * @return a matrix with a row for each detected peak.
 * @throws RSessionWrapperException
 */
private static double[][] centWave(RSessionWrapper rSession, final double[] scanTime, final double[] intensity,
        final double mz, final double snrThreshold, final Range<Double> peakWidth,
        final PeakIntegrationMethod integrationMethod) throws RSessionWrapperException {

    LOG.finest("Detecting peaks.");

    final double[][] peaks;

    // Set vectors.
    rSession.assign("scantime", scanTime);
    rSession.assign("intensity", intensity);

    // Initialize.
    rSession.eval("mz <- " + mz);
    rSession.eval("numPoints <- length(intensity)");

    // Construct xcmsRaw object
    rSession.eval("xRaw <- new(\"xcmsRaw\")");
    rSession.eval("xRaw@tic <- intensity");
    rSession.eval("xRaw@scantime <- scantime * " + SECONDS_PER_MINUTE);
    rSession.eval("xRaw@scanindex <- 1:numPoints");
    rSession.eval("xRaw@env$mz <- rep(mz, numPoints)");
    rSession.eval("xRaw@env$intensity <- intensity");

    // Construct ROIs.
    rSession.eval("ROIs <- list()");
    int roi = 1;
    for (int start = 0; start < intensity.length; start++) {

        // Found non-zero section.
        if (intensity[start] > 0.0) {

            // Look for end.
            int end = start + 1;
            while (end < intensity.length && intensity[end] > 0.0) {

                end++;
            }

            // Add ROI to list.
            rSession.eval("ROIs[[" + roi + "]] <- list('scmin'=" + (start + 1) + ", 'scmax'=" + end
                    + ", 'mzmin'=mz, 'mzmax'=mz)");

            // Next ROI.
            start = end;
            roi++;

        }
    }

    // Do peak picking.
    final Object centWave = roi <= 1 ? null
            : (double[][]) rSession.collect(
                    "findPeaks.centWave(xRaw, ppm=0, mzdiff=0, verbose=TRUE" + ", peakwidth=c("
                            + peakWidth.lowerEndpoint() * SECONDS_PER_MINUTE + ", "
                            + peakWidth.upperEndpoint() * SECONDS_PER_MINUTE + ')' + ", snthresh="
                            + snrThreshold + ", integrate=" + integrationMethod.getIndex() + ", ROI.list=ROIs)",
                    false);

    peaks = (centWave == null) ? null : (double[][]) centWave;

    return peaks;
}

From source file:com.google.cloud.genomics.dataflow.pipelines.AnnotateVariants.java

@Override
public void processElement(DoFn<StreamVariantsRequest, KV<String, VariantAnnotation>>.ProcessContext c)
        throws Exception {
    Genomics genomics = GenomicsFactory.builder().build().fromOfflineAuth(auth);

    StreamVariantsRequest request = StreamVariantsRequest.newBuilder(c.element()).addAllCallSetIds(callSetIds)
            .build();
    LOG.info("processing contig " + request);

    Iterator<StreamVariantsResponse> iter = VariantStreamIterator.enforceShardBoundary(auth, request,
            ShardBoundary.Requirement.STRICT, VARIANT_FIELDS);
    if (!iter.hasNext()) {
        LOG.info("region has no variants, skipping");
        return;
    }

    IntervalTree<Annotation> transcripts = retrieveTranscripts(genomics, request);
    ListMultimap<Range<Long>, Annotation> variantAnnotations = retrieveVariantAnnotations(genomics, request);

    Stopwatch stopwatch = Stopwatch.createStarted();
    int varCount = 0;
    while (iter.hasNext()) {
        Iterable<Variant> varIter = FluentIterable.from(iter.next().getVariantsList())
                .filter(VariantUtils.IS_SNP);
        for (Variant variant : varIter) {
            List<String> alleles = ImmutableList.<String>builder().addAll(variant.getAlternateBasesList())
                    .add(variant.getReferenceBases()).build();
            Range<Long> pos = Range.openClosed(variant.getStart(), variant.getEnd());
            for (String allele : alleles) {
                String outKey = Joiner.on(":").join(variant.getReferenceName(), variant.getStart(), allele,
                        variant.getId());
                for (Annotation match : variantAnnotations.get(pos)) {
                    if (allele.equals(match.getVariant().getAlternateBases())) {
                        // Exact match to a known variant annotation; straightforward join.
                        c.output(KV.of(outKey, match.getVariant()));
                    }
                }

                Iterator<Node<Annotation>> transcriptIter = transcripts
                        .overlappers(pos.lowerEndpoint().intValue(), pos.upperEndpoint().intValue() - 1); // Inclusive.
                while (transcriptIter.hasNext()) {
                    // Calculate an effect of this allele on the coding region of the given transcript.
                    Annotation transcript = transcriptIter.next().getValue();
                    VariantEffect effect = AnnotationUtils.determineVariantTranscriptEffect(variant.getStart(),
                            allele, transcript, getCachedTranscriptBases(genomics, transcript));
                    if (effect != null && !VariantEffect.SYNONYMOUS_SNP.equals(effect)) {
                        c.output(KV.of(outKey, new VariantAnnotation().setAlternateBases(allele).setType("SNP")
                                .setEffect(effect.toString()).setGeneId(transcript.getTranscript().getGeneId())
                                .setTranscriptIds(ImmutableList.of(transcript.getId()))));
                    }
                }
            }
            varCount++;
            if (varCount % 1e3 == 0) {
                LOG.info(String.format("read %d variants (%.2f / s)", varCount,
                        (double) varCount / stopwatch.elapsed(TimeUnit.SECONDS)));
            }
        }
    }
    LOG.info("finished reading " + varCount + " variants in " + stopwatch);
}

From source file:org.sonatype.nexus.content.internal.ContentServlet.java

/**
 * Handles a file response, all the conditional request cases, and eventually the content serving of the file item.
 */
protected void doGetFile(final HttpServletRequest request, final HttpServletResponse response,
        final StorageFileItem file) throws ServletException, IOException {
    // ETag, in "shaved" form of {SHA1{e5c244520e897865709c730433f8b0c44ef271f1}} (without quotes),
    // or null if the file has no SHA1 (like virtual or generated items, whose SHA1 would correspond
    // to the template, not to the actual generated content).
    final String etag;
    if (!file.isContentGenerated() && !file.isVirtual()
            && file.getRepositoryItemAttributes().containsKey(StorageFileItem.DIGEST_SHA1_KEY)) {
        etag = "{SHA1{" + file.getRepositoryItemAttributes().get(StorageFileItem.DIGEST_SHA1_KEY) + "}}";
        // tag header ETag: "{SHA1{e5c244520e897865709c730433f8b0c44ef271f1}}", quotes are a must per the RFC
        response.setHeader("ETag", "\"" + etag + "\"");
    } else {
        etag = null;
    }

    response.setHeader("Content-Type", file.getMimeType());
    response.setDateHeader("Last-Modified", file.getModified());

    // content-length, if known
    if (file.getLength() != ContentLocator.UNKNOWN_LENGTH) {
        // Note: response.setContentLength Servlet API method uses ints (max 2GB file)!
        // TODO: apparently, some Servlet containers follow the servlet API and assume
        // contents can be at most 2GB, so even this workaround below is inherently unsafe.
        // Jetty is checked, and supports this (uses long internally), but unsure for other containers
        response.setHeader("Content-Length", String.valueOf(file.getLength()));
    }

    // handle conditional GETs only for "static" content, actual content stored, not generated
    if (!file.isContentGenerated() && file.getResourceStoreRequest().getIfModifiedSince() != 0
            && file.getModified() <= file.getResourceStoreRequest().getIfModifiedSince()) {
        // this is a conditional GET using time-stamp
        response.setStatus(SC_NOT_MODIFIED);
    } else if (!file.isContentGenerated() && file.getResourceStoreRequest().getIfNoneMatch() != null
            && etag != null && file.getResourceStoreRequest().getIfNoneMatch().equals(etag)) {
        // this is a conditional GET using ETag
        response.setStatus(SC_NOT_MODIFIED);
    } else {
        final List<Range<Long>> ranges = getRequestedRanges(request, file.getLength());

        // pour the content, but only if needed (this method is called even for HEAD requests, just with the
        // content tossed away), so be conservative: getting the input stream involves locking etc. and is expensive
        final boolean contentNeeded = "GET".equalsIgnoreCase(request.getMethod());
        if (ranges.isEmpty()) {
            if (contentNeeded) {
                webUtils.sendContent(file.getInputStream(), response);
            }
        } else if (ranges.size() > 1) {
            throw new ErrorStatusException(SC_NOT_IMPLEMENTED, "Not Implemented",
                    "Multiple ranges not yet supported.");
        } else {
            final Range<Long> range = ranges.get(0);
            if (!isRequestedRangeSatisfiable(file, range)) {
                response.setStatus(SC_REQUESTED_RANGE_NOT_SATISFIABLE);
                response.setHeader("Content-Length", "0");
                response.setHeader("Content-Range", "bytes */" + file.getLength());
                return;
            }
            final long bodySize = 1 + range.upperEndpoint() - range.lowerEndpoint();
            response.setStatus(SC_PARTIAL_CONTENT);
            response.setHeader("Content-Length", String.valueOf(bodySize));
            response.setHeader("Content-Range",
                    range.lowerEndpoint() + "-" + range.upperEndpoint() + "/" + file.getLength());
            if (contentNeeded) {
                try (final InputStream in = file.getInputStream()) {
                    in.skip(range.lowerEndpoint());
                    webUtils.sendContent(limit(in, bodySize), response);
                }
            }
        }
    }
}
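
The Range<Long> above comes from the servlet's own getRequestedRanges helper, which is not shown here; the sketch below uses an assumed, simplified parser for a single "bytes=M-N" header purely to illustrate how lowerEndpoint() and upperEndpoint() drive the Content-Length and Content-Range arithmetic used in doGetFile.

import com.google.common.collect.Range;

public class ByteRangeSketch {
    // Assumed parser for a single "bytes=M-N" header (suffix ranges and validation omitted).
    static Range<Long> parseSingleRange(String header, long fileLength) {
        String[] parts = header.substring("bytes=".length()).split("-", 2);
        long start = Long.parseLong(parts[0].trim());
        long end = parts[1].trim().isEmpty() ? fileLength - 1 : Long.parseLong(parts[1].trim());
        return Range.closed(start, end);
    }

    public static void main(String[] args) {
        long fileLength = 1000L;
        Range<Long> range = parseSingleRange("bytes=100-199", fileLength);
        long bodySize = 1 + range.upperEndpoint() - range.lowerEndpoint();
        System.out.println("Content-Length: " + bodySize);                                 // 100
        System.out.println("Content-Range: "
                + range.lowerEndpoint() + "-" + range.upperEndpoint() + "/" + fileLength);  // 100-199/1000
    }
}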

From source file:edu.mit.streamjit.impl.compiler2.ActorGroup.java

/**
 * Returns a void->void MethodHandle that will run this ActorGroup for the
 * given iterations using the given ConcreteStorage instances.
 * @param iterations the range of iterations to run for
 * @param storage the storage being used
 * @return a void->void method handle
 */
public MethodHandle specialize(Range<Integer> iterations, Map<Storage, ConcreteStorage> storage,
        BiFunction<MethodHandle[], WorkerActor, MethodHandle> switchFactory, int unrollFactor,
        ImmutableTable<Actor, Integer, IndexFunctionTransformer> inputTransformers,
        ImmutableTable<Actor, Integer, IndexFunctionTransformer> outputTransformers) {
    //TokenActors are special.
    assert !isTokenGroup() : actors();

    Map<Actor, MethodHandle> withRWHandlesBound = bindActorsToStorage(iterations, storage, switchFactory,
            inputTransformers, outputTransformers);

    int totalIterations = iterations.upperEndpoint() - iterations.lowerEndpoint();
    unrollFactor = Math.min(unrollFactor, totalIterations);
    int unrolls = (totalIterations / unrollFactor);
    int unrollEndpoint = iterations.lowerEndpoint() + unrolls * unrollFactor;
    MethodHandle overall = Combinators.semicolon(
            makeGroupLoop(Range.closedOpen(iterations.lowerEndpoint(), unrollEndpoint), unrollFactor,
                    withRWHandlesBound),
            makeGroupLoop(Range.closedOpen(unrollEndpoint, iterations.upperEndpoint()), 1, withRWHandlesBound));
    return overall;
}

From source file:com.pingcap.tikv.predicates.ScanBuilder.java

private List<KeyRange> buildIndexScanKeyRange(TiTableInfo table, TiIndexInfo index,
        List<IndexRange> indexRanges) {
    requireNonNull(table, "Table cannot be null to encoding keyRange");
    requireNonNull(index, "Index cannot be null to encoding keyRange");
    requireNonNull(indexRanges, "indexRanges cannot be null to encoding keyRange");

    List<KeyRange> ranges = new ArrayList<>(indexRanges.size());

    for (IndexRange ir : indexRanges) {
        CodecDataOutput cdo = new CodecDataOutput();
        List<Object> values = ir.getAccessPoints();
        List<DataType> types = ir.getTypes();
        for (int i = 0; i < values.size(); i++) {
            Object v = values.get(i);
            DataType t = types.get(i);
            t.encode(cdo, DataType.EncodeType.KEY, v);
        }

        byte[] pointsData = cdo.toBytes();

        cdo.reset();
        Range r = ir.getRange();
        byte[] lPointKey;
        byte[] uPointKey;

        byte[] lKey;
        byte[] uKey;
        if (r == null) {
            lPointKey = pointsData;
            uPointKey = KeyUtils.prefixNext(lPointKey.clone());

            lKey = new byte[0];
            uKey = new byte[0];
        } else {
            lPointKey = pointsData;
            uPointKey = pointsData;

            DataType type = ir.getRangeType();
            if (!r.hasLowerBound()) {
                // -INF
                type.encodeMinValue(cdo);
                lKey = cdo.toBytes();
            } else {
                Object lb = r.lowerEndpoint();
                type.encode(cdo, DataType.EncodeType.KEY, lb);
                lKey = cdo.toBytes();
                if (r.lowerBoundType().equals(BoundType.OPEN)) {
                    lKey = KeyUtils.prefixNext(lKey);
                }
            }

            cdo.reset();
            if (!r.hasUpperBound()) {
                // INF
                type.encodeMaxValue(cdo);
                uKey = cdo.toBytes();
            } else {
                Object ub = r.upperEndpoint();
                type.encode(cdo, DataType.EncodeType.KEY, ub);
                uKey = cdo.toBytes();
                if (r.upperBoundType().equals(BoundType.CLOSED)) {
                    uKey = KeyUtils.prefixNext(uKey);
                }
            }

            cdo.reset();
        }
        TableCodec.writeIndexSeekKey(cdo, table.getId(), index.getId(), lPointKey, lKey);

        ByteString lbsKey = ByteString.copyFrom(cdo.toBytes());

        cdo.reset();
        TableCodec.writeIndexSeekKey(cdo, table.getId(), index.getId(), uPointKey, uKey);
        ByteString ubsKey = ByteString.copyFrom(cdo.toBytes());

        ranges.add(KeyRange.newBuilder().setStart(lbsKey).setEnd(ubsKey).build());
    }

    if (ranges.isEmpty()) {
        ranges.add(INDEX_FULL_RANGE);
    }
    return ranges;
}
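
The encoded-key handling above is TiKV-specific; the sketch below substitutes plain long bounds (an assumption made only for illustration) to show the same bound-type logic: an OPEN lower bound and a CLOSED upper bound are each bumped past their endpoint, so the result is always an inclusive-start / exclusive-end pair.

import com.google.common.collect.BoundType;
import com.google.common.collect.Range;

public class RangeToScanBoundsSketch {
    public static void main(String[] args) {
        Range<Long> r = Range.openClosed(10L, 20L); // (10..20]
        long start = !r.hasLowerBound() ? Long.MIN_VALUE
                : (r.lowerBoundType() == BoundType.OPEN ? r.lowerEndpoint() + 1 : r.lowerEndpoint());
        long end = !r.hasUpperBound() ? Long.MAX_VALUE
                : (r.upperBoundType() == BoundType.CLOSED ? r.upperEndpoint() + 1 : r.upperEndpoint());
        System.out.println("[" + start + ", " + end + ")"); // [11, 21)
    }
}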

From source file:fr.openwide.core.export.excel.AbstractExcelTableExport.java

/**
 * Adds the headers to the worksheet and hides the columns that need to be hidden.
 *
 * @param sheet the worksheet
 * @param rowIndex the row number
 * @param columnInfos RangeMap containing the column information (values) and the indexes these columns
 *                    apply to (keys). "Columns" spanning more than one index are merged automatically.
 */
protected void addHeadersToSheet(Sheet sheet, int rowIndex, RangeMap<Integer, ColumnInformation> columnInfos) {
    Row rowHeader = sheet.createRow(rowIndex);
    for (Map.Entry<Range<Integer>, ColumnInformation> entry : columnInfos.asMapOfRanges().entrySet()) {
        Range<Integer> range = entry.getKey();
        ColumnInformation columnInformation = entry.getValue();

        addHeaderCell(rowHeader, range.lowerEndpoint(), getColumnLabel(columnInformation.getHeaderKey()));

        for (Integer columnIndex : ContiguousSet.create(range, DiscreteDomain.integers())) {
            sheet.setColumnHidden(columnIndex, columnInformation.isHidden());
        }

        int beginIndex = range.lowerEndpoint();
        int endIndex = range.upperEndpoint();
        if (beginIndex != endIndex) {
            sheet.addMergedRegion(new CellRangeAddress(rowIndex, rowIndex, beginIndex, endIndex));
        }
    }
}
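
ColumnInformation is a project class that is not shown here; the sketch below substitutes plain String labels (an assumption for illustration) to show the same RangeMap pattern: each Range<Integer> key spans the sheet columns covered by one header, and lowerEndpoint()/upperEndpoint() supply the merge bounds.

import com.google.common.collect.Range;
import com.google.common.collect.RangeMap;
import com.google.common.collect.TreeRangeMap;

import java.util.Map;

public class HeaderRangesSketch {
    public static void main(String[] args) {
        RangeMap<Integer, String> headers = TreeRangeMap.create();
        headers.put(Range.closed(0, 0), "Id");
        headers.put(Range.closed(1, 3), "Address"); // spans columns 1..3, so it would be merged

        for (Map.Entry<Range<Integer>, String> entry : headers.asMapOfRanges().entrySet()) {
            Range<Integer> range = entry.getKey();
            int beginIndex = range.lowerEndpoint();
            int endIndex = range.upperEndpoint();
            System.out.println(entry.getValue() + " -> columns " + beginIndex + ".." + endIndex
                    + (beginIndex != endIndex ? " (merged)" : ""));
        }
    }
}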

From source file:com.nimbits.server.io.BlobStoreImpl.java

@Override
public List<Value> getDataSegment(final Entity entity, final Range<Date> timespan) {
    PersistenceManager pm = persistenceManagerFactory.getPersistenceManager();
    try {
        final List<Value> retObj = new ArrayList<Value>();
        final Query q = pm.newQuery(ValueBlobStoreEntity.class);
        q.setFilter("entity == k && minTimestamp <= et && maxTimestamp >= st ");
        q.declareParameters("String k, Long et, Long st");
        q.setOrdering("minTimestamp desc");

        final Iterable<ValueBlobStore> result = (Iterable<ValueBlobStore>) q.execute(entity.getKey(),
                timespan.upperEndpoint().getTime(), timespan.lowerEndpoint().getTime());
        for (final ValueBlobStore e : result) { //todo break out of loop when range is met
            if (validateOwnership(entity, e)) {
                List<Value> values = readValuesFromFile(e.getBlobKey());
                for (final Value vx : values) {
                    if (timespan.contains(vx.getTimestamp())) {
                        retObj.add(vx);

                    }
                }
            }
        }
        return retObj;
    } finally {
        pm.close();
    }
}

From source file:edu.mit.streamjit.impl.compiler2.CompositionAllocationStrategy.java

@Override
public void allocateGroup(ActorGroup group, Range<Integer> iterations, List<Core> cores, Configuration config) {
    if (group.isStateful()) {
        int minStatefulId = Integer.MAX_VALUE;
        for (Actor a : group.actors())
            if (a instanceof WorkerActor && ((WorkerActor) a).archetype().isStateful())
                minStatefulId = Math.min(minStatefulId, a.id());
        Configuration.SwitchParameter<Integer> param = config.getParameter("Group" + minStatefulId + "Core",
                Configuration.SwitchParameter.class, Integer.class);
        cores.get(param.getValue() % cores.size()).allocate(group, iterations);
        return;
    }

    Configuration.CompositionParameter param = config.getParameter("Group" + group.id() + "Cores",
            Configuration.CompositionParameter.class);
    assert iterations.lowerBoundType() == BoundType.CLOSED && iterations.upperBoundType() == BoundType.OPEN;
    int totalAvailable = iterations.upperEndpoint() - iterations.lowerEndpoint();
    int[] allocations = new int[cores.size()];
    int totalAllocated = 0;
    for (int i = 0; i < param.getLength() && i < allocations.length; ++i) {
        int allocation = DoubleMath.roundToInt(param.getValue(i) * totalAvailable, RoundingMode.HALF_EVEN);
        allocations[i] = allocation;
        totalAllocated += allocation;
    }
    //If we allocated more than we have, remove from the cores with the least.
    //Need a loop here because we might not have enough on the least core.
    while (totalAllocated > totalAvailable) {
        int least = Ints.indexOf(allocations, Ints.max(allocations));
        for (int i = 0; i < allocations.length; ++i)
            if (allocations[i] > 0 && allocations[i] < allocations[least])
                least = i;
        int toRemove = Math.min(allocations[least], totalAllocated - totalAvailable);
        allocations[least] -= toRemove;
        totalAllocated -= toRemove;
    }
    //If we didn't allocate enough, allocate on the cores with the most.
    if (totalAllocated < totalAvailable) {
        int most = Ints.indexOf(allocations, Ints.min(allocations));
        for (int i = 0; i < allocations.length; ++i)
            if (allocations[i] > allocations[most])
                most = i;
        allocations[most] += totalAvailable - totalAllocated;
        totalAllocated += totalAvailable - totalAllocated;
    }
    assert totalAllocated == totalAvailable : totalAllocated + " " + totalAvailable;

    int lower = iterations.lowerEndpoint();
    for (int i = 0; i < allocations.length; ++i)
        if (allocations[i] > 0) {
            cores.get(i).allocate(group, Range.closedOpen(lower, lower + allocations[i]));
            lower += allocations[i];
        }
}
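
A small sketch (with made-up numbers) of the half-open arithmetic used above: for a closedOpen range, upperEndpoint() - lowerEndpoint() is the iteration count, and contiguous closedOpen sub-ranges starting at lowerEndpoint() partition it with no gaps or overlap.

import com.google.common.collect.Range;

public class SplitIterationsSketch {
    public static void main(String[] args) {
        Range<Integer> iterations = Range.closedOpen(100, 160);
        int totalAvailable = iterations.upperEndpoint() - iterations.lowerEndpoint(); // 60
        int[] allocations = {25, 20, 15}; // made-up per-core shares summing to totalAvailable

        int lower = iterations.lowerEndpoint();
        for (int allocation : allocations) {
            Range<Integer> slice = Range.closedOpen(lower, lower + allocation);
            System.out.println("core slice: [" + slice.lowerEndpoint() + ", " + slice.upperEndpoint() + ")");
            lower += allocation;
        }
        // prints [100, 125), [125, 145), [145, 160): contiguous and covering all 60 iterations
    }
}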

From source file:jetbrains.jetpad.hybrid.BaseHybridSynchronizer.java

private CellTrait createTargetTrait() {
    return new CellTrait() {
        @Override
        public Object get(Cell cell, CellTraitPropertySpec<?> spec) {
            if (spec == HYBRID_SYNCHRONIZER) {
                return BaseHybridSynchronizer.this;
            }
            if (spec == CellStateHandler.PROPERTY) {
                return getCellStateHandler();
            }

            return super.get(cell, spec);
        }

        @Override
        public void onKeyPressed(Cell cell, KeyEvent event) {
            Cell focusedCell = cell.getContainer().focusedCell.get();
            if (myTargetList.contains(focusedCell)) {
                Cell currentCell = cell.getContainer().focusedCell.get();
                if (!hasSelection()) {
                    if (event.is(KeyStrokeSpecs.SELECT_UP) && currentCell != null) {
                        mySelectionSupport.select(currentCell, currentCell);
                        event.consume();
                    }
                } else {
                    Range<Integer> currentRange = selection();
                    if (event.is(KeyStrokeSpecs.SELECT_UP)) {
                        ParseNode parseNode = myTokenListEditor.getParseNode();
                        if (parseNode != null) {
                            if (!currentRange.equals(parseNode.getRange())) {
                                ParseNode node = ParseNodes.findForRange(parseNode, currentRange);
                                ParseNode parentNode = ParseNodes.nonSameRangeParent(node);
                                if (parentNode != null) {
                                    select(parentNode.getRange());
                                    event.consume();
                                }
                            }
                        } else {
                            if (!currentRange.equals(Range.closed(0, tokens().size()))) {
                                select(Range.closed(0, tokens().size()));
                                event.consume();
                            }
                        }
                    }

                    if (event.is(KeyStrokeSpecs.SELECT_DOWN)) {
                        ParseNode parseNode = myTokenListEditor.getParseNode();
                        if (parseNode != null) {
                            ParseNode node = ParseNodes.findForRange(parseNode, currentRange);
                            ParseNode childNode = ParseNodes.nonSameRangeChild(node,
                                    myTargetList.indexOf(mySelectionSupport.currentCell()));
                            if (childNode != null) {
                                select(childNode.getRange());
                                event.consume();
                                return;
                            }
                        }

                        if (!mySelectionSupport.isCurrentCompletelySelected()) {
                            mySelectionSupport.clearSelection();
                            event.consume();
                        }
                    }
                }
            }
            super.onKeyPressed(cell, event);
        }

        @Override
        public void onCopy(Cell cell, CopyCutEvent event) {
            if (canCopy()) {
                event.consume(copy());
                return;
            }
            super.onCopy(cell, event);
        }

        @Override
        public void onCut(Cell cell, CopyCutEvent event) {
            if (canCut()) {
                ClipboardContent content = cut();
                myTokensEditPostProcessor.afterTokensEdit(tokens(), property().get());
                event.consume(content);
                return;
            }
            super.onCut(cell, event);
        }

        @Override
        public void onPaste(Cell cell, PasteEvent event) {
            if (canPaste(event.getContent())) {
                paste(event.getContent());
                myTokensEditPostProcessor.afterTokensEdit(tokens(), property().get());
                event.consume();
                return;
            }
            super.onPaste(cell, event);
        }

        private boolean canPaste(ClipboardContent content) {
            return content.isSupported(TOKENS_CONTENT);
        }

        private void paste(ClipboardContent content) {
            List<Token> tokens = content.get(TOKENS_CONTENT);
            Cell currentCell = mySelectionSupport.currentCell();

            int targetIndex;
            if (currentCell != null) {
                int currentCellIndex = myTargetList.indexOf(currentCell);
                boolean home = Positions.isHomePosition(currentCell);
                boolean end = Positions.isEndPosition(currentCell);
                if (home && end) {
                    // One-char token which allows editing at only one side
                    if (currentCell instanceof TextTokenCell && ((TextTokenCell) currentCell).noSpaceToLeft()) {
                        targetIndex = currentCellIndex + 1;
                    } else {
                        targetIndex = currentCellIndex;
                    }
                } else if (home) {
                    targetIndex = currentCellIndex;
                } else {
                    targetIndex = currentCellIndex + 1;
                }
            } else {
                targetIndex = 0;
            }

            myTokenListEditor.tokens.addAll(targetIndex, tokens);
            myTokenListEditor.updateToPrintedTokens();
            tokenOperations().select(targetIndex + tokens.size() - 1, LAST).run();
        }

        private boolean canCopy() {
            return hasSelection();
        }

        private ClipboardContent copy() {
            final Range<Integer> selection = selection();
            final List<Token> copiedTokens = new ArrayList<>(
                    selection.upperEndpoint() - selection.lowerEndpoint());
            for (Token token : tokens().subList(selection.lowerEndpoint(), selection.upperEndpoint())) {
                copiedTokens.add(token.copy());
            }
            return new ClipboardContent() {
                @Override
                public boolean isSupported(ContentKind<?> kind) {
                    return kind == TOKENS_CONTENT;
                }

                @Override
                public <T> T get(ContentKind<T> kind) {
                    if (kind == TOKENS_CONTENT) {
                        List<Token> result = new ArrayList<>(copiedTokens.size());
                        for (Token token : copiedTokens) {
                            result.add(token.copy());
                        }
                        return (T) Collections.unmodifiableList(result);
                    }
                    return null;
                }

                @Override
                public String toString() {
                    try {
                        return TokenUtil.getText(copiedTokens);
                    } catch (UnsupportedOperationException e) {
                        return super.toString();
                    }
                }
            };
        }

        private boolean canCut() {
            return hasSelection();
        }

        private ClipboardContent cut() {
            ClipboardContent result = copy();
            clearSelection();
            return result;
        }
    };
}

From source file:org.apache.kylin.storage.hbase.ii.coprocessor.endpoint.IIEndpoint.java

private Scan prepareScan(IIProtos.IIRequest request, HRegion region) throws IOException {
    Scan scan = new Scan();

    scan.addColumn(IIDesc.HBASE_FAMILY_BYTES, IIDesc.HBASE_QUALIFIER_BYTES);
    scan.addColumn(IIDesc.HBASE_FAMILY_BYTES, IIDesc.HBASE_DICTIONARY_BYTES);

    if (request.hasTsRange()) {
        Range<Long> tsRange = (Range<Long>) SerializationUtils
                .deserialize(HBaseZeroCopyByteString.zeroCopyGetBytes(request.getTsRange()));
        byte[] regionStartKey = region.getStartKey();
        if (!ArrayUtils.isEmpty(regionStartKey)) {
            shard = BytesUtil.readUnsigned(regionStartKey, 0, IIKeyValueCodec.SHARD_LEN);
        } else {
            shard = 0;
        }
        logger.info("Start key of the region is: " + BytesUtil.toReadableText(regionStartKey)
                + ", making shard to be: " + shard);

        if (tsRange.hasLowerBound()) {
            // differentiating GT and GTE seems not very beneficial
            Preconditions.checkArgument(shard != -1, "Shard is -1!");
            long tsStart = tsRange.lowerEndpoint();
            logger.info("ts start is " + tsStart);

            byte[] idealStartKey = new byte[IIKeyValueCodec.SHARD_LEN + IIKeyValueCodec.TIMEPART_LEN];
            BytesUtil.writeUnsigned(shard, idealStartKey, 0, IIKeyValueCodec.SHARD_LEN);
            BytesUtil.writeLong(tsStart, idealStartKey, IIKeyValueCodec.SHARD_LEN,
                    IIKeyValueCodec.TIMEPART_LEN);
            logger.info("idealStartKey is (readable): " + BytesUtil.toReadableText(idealStartKey));
            Result result = region.getClosestRowBefore(idealStartKey, IIDesc.HBASE_FAMILY_BYTES);
            if (result != null) {
                byte[] actualStartKey = Arrays.copyOf(result.getRow(),
                        IIKeyValueCodec.SHARD_LEN + IIKeyValueCodec.TIMEPART_LEN);
                scan.setStartRow(actualStartKey);
                logger.info("The start key is set to " + BytesUtil.toReadableText(actualStartKey));
            } else {
                logger.info("There is no key before idealStartKey so ignore tsStart");
            }
        }

        if (tsRange.hasUpperBound()) {
            // differentiating LT and LTE seems not very beneficial
            Preconditions.checkArgument(shard != -1, "Shard is -1");
            long tsEnd = tsRange.upperEndpoint();
            logger.info("ts end is " + tsEnd);

            byte[] actualEndKey = new byte[IIKeyValueCodec.SHARD_LEN + IIKeyValueCodec.TIMEPART_LEN];
            BytesUtil.writeUnsigned(shard, actualEndKey, 0, IIKeyValueCodec.SHARD_LEN);
            BytesUtil.writeLong(tsEnd + 1, actualEndKey, IIKeyValueCodec.SHARD_LEN,
                    IIKeyValueCodec.TIMEPART_LEN);//notice +1 here
            scan.setStopRow(actualEndKey);
            logger.info("The stop key is set to " + BytesUtil.toReadableText(actualEndKey));
        }
    }

    return scan;
}