Example usage for com.google.common.collect Range closedOpen

List of usage examples for com.google.common.collect Range closedOpen

Introduction

In this page you can find the example usage for com.google.common.collect Range closedOpen.

Prototype

public static <C extends Comparable<?>> Range<C> closedOpen(C lower, C upper) 

Source Link

Document

Returns a range that contains all values greater than or equal to lower and strictly less than upper.

Usage

From source file:org.eclipse.fx.ui.controls.styledtext.internal.VerticalLineFlow.java

/**
 * Updates the flow for the given set of line indices.
 *
 * <p>Only lines that are both currently visible and within the valid line
 * index range {@code [0, numberOfLines)} are prepared.
 *
 * @param r the set of line indices to update
 */
public void update(RangeSet<Integer> r) {
    Range<Integer> validLines = Range.closedOpen(Integer.valueOf(0), Integer.valueOf(this.numberOfLines.get()));
    RangeSet<Integer> visibleSubset = r.subRangeSet(this.visibleLines.get());
    prepareNodes(visibleSubset.subRangeSet(validLines));
}

From source file:eu.trentorise.opendata.semtext.SemText.java

/**
 * Returns a new SemText with all the terms matching the provided regex text
 * deleted./*from w  ww .  j a va  2s  . c o m*/
 *
 */
public SemText deleteTerms(Pattern pattern) {
    checkNotNull(pattern);
    checkNotEmpty(pattern.pattern(), "Pattern can't be empty!!");

    Matcher m = pattern.matcher(text);
    List ranges = new ArrayList();
    while (m.find()) {
        ranges.add(Range.closedOpen(m.start(), m.end()));
    }
    return deleteTerms(ranges);
}

From source file:org.cinchapi.concourse.server.model.Ranges.java

/**
 * Return a new {@link Range} that is the merger (e.g. union) of {@code a}
 * and {@code b}. The new {@link Range} maintains both the lower and higher
 * endpoint/bound between the two inputs.
 *
 * @param a the first range
 * @param b the second range
 * @return the union of {@code a} and {@code b}, or {@code null} if the two
 *         ranges are not connected and therefore cannot be merged
 */
public static Range<Value> merge(Range<Value> a, Range<Value> b) {
    if (a.isConnected(b)) {
        // aStart: a supplies the lower endpoint; aEnd: a supplies the upper.
        boolean aStart = compareToLower(a, b) < 0;
        boolean aEnd = compareToUpper(a, b) > 0;
        boolean lower = getLowerBoundType(aStart ? a : b) == BoundType.CLOSED;
        // BUGFIX: the upper bound type must come from the range that
        // supplies the UPPER endpoint (aEnd), not the one supplying the
        // lower endpoint (aStart) as the original code did — otherwise the
        // merged range can carry the wrong open/closed upper bound.
        boolean upper = getUpperBoundType(aEnd ? a : b) == BoundType.CLOSED;
        if (lower && upper) {
            return Range.closed(getLowerEndpoint(aStart ? a : b), getUpperEndpoint(aEnd ? a : b));
        } else if (!lower && upper) {
            return Range.closedOpen(getLowerEndpoint(aStart ? a : b), getUpperEndpoint(aEnd ? a : b));
        } else if (lower && !upper) {
            return Range.openClosed(getLowerEndpoint(aStart ? a : b), getUpperEndpoint(aEnd ? a : b));
        } else {
            return Range.open(getLowerEndpoint(aStart ? a : b), getUpperEndpoint(aEnd ? a : b));
        }
    } else {
        return null;
    }
}

From source file:org.dishevelled.bio.feature.Gff3Record.java

/**
 * Return this GFF3 record as a 0-based coordinate system, closed open range.
 *
 * @return this GFF3 record as a 0-based coordinate system, closed open range
 */
public Range<Long> toRange() {
    // [start, end): inclusive of start, exclusive of end.
    final Range<Long> zeroBasedRange = Range.closedOpen(start, end);
    return zeroBasedRange;
}

From source file:org.openmhealth.shimmer.common.controller.DataPointSearchController.java

/**
 * Builds a creation-timestamp range from optional bounds.
 *
 * @param onOrAfterDateTime the inclusive lower bound, or null for unbounded below
 * @param beforeDateTime the exclusive upper bound, or null for unbounded above
 * @return a [lower, upper) range when both bounds are present, a half-bounded
 *         range when only one is, or an all-encompassing range when neither is
 */
public Range<OffsetDateTime> asRange(OffsetDateTime onOrAfterDateTime, OffsetDateTime beforeDateTime) {

    boolean hasLowerBound = onOrAfterDateTime != null;
    boolean hasUpperBound = beforeDateTime != null;

    if (hasLowerBound && hasUpperBound) {
        return Range.closedOpen(onOrAfterDateTime, beforeDateTime);
    }
    if (hasLowerBound) {
        return Range.atLeast(onOrAfterDateTime);
    }
    if (hasUpperBound) {
        return Range.lessThan(beforeDateTime);
    }
    return Range.all();
}

From source file:org.openmhealth.dsu.controller.DataPointController.java

/**
 * Reads data points./*from   w w  w  . ja  v a2  s.  co m*/
 *
 * @param schemaNamespace the namespace of the schema the data points conform to
 * @param schemaName the name of the schema the data points conform to
 * @param schemaVersion the version of the schema the data points conform to
 * @param createdOnOrAfter the earliest creation timestamp of the data points to return, inclusive
 * @param createdBefore the latest creation timestamp of the data points to return, exclusive
 * @param offset the number of data points to skip
 * @param limit the number of data points to return
 * @return a list of matching data points
 */
// TODO confirm if HEAD handling needs anything additional
// only allow clients with read scope to read data points
@PreAuthorize("#oauth2.clientHasRole('" + CLIENT_ROLE + "') and #oauth2.hasScope('" + DATA_POINT_READ_SCOPE
        + "')")
// TODO look into any meaningful @PostAuthorize filtering
@RequestMapping(value = "/dataPoints", method = { HEAD, GET }, produces = APPLICATION_JSON_VALUE)
public @ResponseBody ResponseEntity<Iterable<DataPoint>> readDataPoints(
        @RequestParam(value = SCHEMA_NAMESPACE_PARAMETER) final String schemaNamespace,
        @RequestParam(value = SCHEMA_NAME_PARAMETER) final String schemaName,
        // TODO make this optional and update all associated code
        @RequestParam(value = SCHEMA_VERSION_PARAMETER) final String schemaVersion,
        // TODO replace with Optional<> in Spring MVC 4.1
        @RequestParam(value = CREATED_ON_OR_AFTER_PARAMETER, required = false) final OffsetDateTime createdOnOrAfter,
        @RequestParam(value = CREATED_BEFORE_PARAMETER, required = false) final OffsetDateTime createdBefore,
        @RequestParam(value = RESULT_OFFSET_PARAMETER, defaultValue = "0") final Integer offset,
        @RequestParam(value = RESULT_LIMIT_PARAMETER, defaultValue = DEFAULT_RESULT_LIMIT) final Integer limit,
        Authentication authentication) {

    // TODO add validation or explicitly comment that this is handled using exception translators

    // determine the user associated with the access token to restrict the search accordingly
    String endUserId = getEndUserId(authentication);

    DataPointSearchCriteria searchCriteria = new DataPointSearchCriteria(endUserId, schemaNamespace, schemaName,
            schemaVersion);

    if (createdOnOrAfter != null && createdBefore != null) {
        searchCriteria.setCreationTimestampRange(Range.closedOpen(createdOnOrAfter, createdBefore));
    } else if (createdOnOrAfter != null) {
        searchCriteria.setCreationTimestampRange(Range.atLeast(createdOnOrAfter));
    } else if (createdBefore != null) {
        searchCriteria.setCreationTimestampRange(Range.lessThan(createdBefore));
    }

    Iterable<DataPoint> dataPoints = dataPointService.findBySearchCriteria(searchCriteria, offset, limit);

    HttpHeaders headers = new HttpHeaders();

    // FIXME add pagination headers
    // headers.set("Next");
    // headers.set("Previous");

    return new ResponseEntity<>(dataPoints, headers, OK);
}

From source file:edu.mit.streamjit.impl.compiler2.SubsetBiasAverageAllocationStrategy.java

@Override
public void allocateGroup(ActorGroup group, Range<Integer> iterations, List<Core> cores, Configuration config) {
    // Averages the per-actor tuning parameters (core count, core order,
    // bias count and bias fraction) over the group's actors, then splits
    // the iteration range between a biased subset of cores (which receive
    // a reduced share) and the remaining cores.
    int numCores = 0, biasCount = 0;
    List<ImmutableList<? extends Integer>> coreOrders = new ArrayList<>();
    float bias = 0;
    for (Actor a : group.actors()) {
        int id = a.id();
        numCores += config.getParameter("Group" + id + "CoreCount", Configuration.IntParameter.class)
                .getValue();
        Configuration.PermutationParameter<Integer> coreOrderParam = config.getParameter(
                "Group" + id + "CoreOrder", Configuration.PermutationParameter.class, Integer.class);
        coreOrders.add(coreOrderParam.getUniverse());
        int ourBiasCount = config.getParameter("Group" + id + "BiasCount", Configuration.IntParameter.class)
                .getValue();
        // Never bias away every core accumulated so far; keep at least one.
        biasCount += Math.min(ourBiasCount, numCores - 1);
        bias += config.getParameter("Group" + id + "Bias", Configuration.FloatParameter.class).getValue();
    }
    // Average the accumulated parameters across the actors: core count
    // rounds up, bias count rounds down (conservative in both directions).
    numCores = IntMath.divide(numCores, group.actors().size(), RoundingMode.CEILING);
    biasCount = IntMath.divide(biasCount, group.actors().size(), RoundingMode.FLOOR);
    bias /= group.actors().size();
    //Transpose coreOrders: interleave each actor's preference list so every
    //actor's i-th choice is considered before anyone's (i+1)-th choice.
    List<Integer> coreOrder = new ArrayList<>();
    for (int i = 0; i < coreOrders.get(0).size(); ++i)
        for (int j = 0; j < coreOrders.size(); ++j)
            coreOrder.add(coreOrders.get(j).get(i));
    //Remove duplicates preserving order.
    coreOrder = new ArrayList<>(new LinkedHashSet<>(coreOrder));

    // Pick the first numCores valid core indices; the first biasCount of
    // those become the biased subset.
    List<Core> subset = new ArrayList<>(numCores);
    for (int i = 0; i < coreOrder.size() && subset.size() < numCores; ++i)
        if (coreOrder.get(i) < cores.size())
            subset.add(cores.get(coreOrder.get(i)));
    List<Core> biasSubset = new ArrayList<>(biasCount);
    while (biasSubset.size() < biasCount)
        biasSubset.add(subset.remove(0));

    // The biased cores collectively handle deficitFraction of the work;
    // bias == 1 removes the deficit entirely (they get a full fair share).
    float deficitFraction = biasCount * (1 - bias) / numCores, surplusFraction = 1 - deficitFraction;
    assert deficitFraction >= 0 && surplusFraction >= 0 : String.format("%d %d %f -> %f %f", numCores,
            biasCount, bias, deficitFraction, surplusFraction);
    // Canonicalize to a [lower, upper) range so the endpoint arithmetic below is exact.
    iterations = iterations.canonical(DiscreteDomain.integers());
    int totalIterations = iterations.upperEndpoint() - iterations.lowerEndpoint();
    int biasIterations = (int) (totalIterations * deficitFraction);
    //We pass a null config to ensure we don't interfere with the other strategy.
    if (biasCount > 0)
        new FullDataParallelAllocationStrategy(biasCount).allocateGroup(group,
                Range.closedOpen(iterations.lowerEndpoint(), iterations.lowerEndpoint() + biasIterations),
                biasSubset, null);
    if (numCores - biasCount > 0)
        new FullDataParallelAllocationStrategy(numCores - biasCount).allocateGroup(group,
                Range.closedOpen(iterations.lowerEndpoint() + biasIterations, iterations.upperEndpoint()),
                subset, null);
}

From source file:org.obm.opush.windowing.WindowingStepdefs.java

/**
 * Populates the inbox with the requested numbers of added, changed and
 * deleted emails. UIDs are assigned contiguously: additions take
 * [0, adds), changes [adds, adds + changes) and deletions
 * [adds + changes, adds + changes + deletions).
 */
@Given("user has (\\d+) new elements, (\\d+) changes and (\\d+) deletions$")
public void newBunchOfEmails(long adds, long changes, long deletions) {
    EmailChanges.Builder newInbox = EmailChanges.builder();
    // Use 0L instead of the easily-misread lowercase literal 0l.
    for (long uid : ContiguousSet.create(Range.closedOpen(0L, adds), DiscreteDomain.longs())) {
        newInbox.addition(Email.builder().uid(uid).build());
    }
    for (long uid : ContiguousSet.create(Range.closedOpen(adds, adds + changes), DiscreteDomain.longs())) {
        newInbox.change(Email.builder().uid(uid).build());
    }
    for (long uid : ContiguousSet.create(Range.closedOpen(adds + changes, adds + changes + deletions),
            DiscreteDomain.longs())) {
        newInbox.deletion(Email.builder().uid(uid).build());
    }
    inbox = newInbox.build();
}

From source file:org.eclipse.fx.ui.controls.styledtext.internal.VFlow.java

/**
 * Reacts to a change in the total line count: newly added lines are
 * scheduled for update, removed lines are released.
 *
 * @param x the observable that changed
 * @param o the previous line count
 * @param n the new line count
 */
private void onNumberOfLinesChange(Observable x, Number o, Number n) {
    if (n.intValue() > o.intValue()) {
        // Lines [o, n) were added.
        RangeSet<Integer> toUpdate = TreeRangeSet.create();
        toUpdate.add(Range.closedOpen(Integer.valueOf(o.intValue()), Integer.valueOf(n.intValue())));
        triggerUpdate(toUpdate);
    } else if (n.intValue() < o.intValue()) {
        // Lines [n, o) were removed. BUGFIX: use closedOpen for symmetry
        // with the growth branch — valid line indices were [0, o), so the
        // previous closed(n, o) range included index o, a line that never
        // existed.
        RangeSet<Integer> toRelease = TreeRangeSet.create();
        toRelease.add(Range.closedOpen(Integer.valueOf(n.intValue()), Integer.valueOf(o.intValue())));
        triggerRelease(toRelease);
    }
}

From source file:edu.cmu.lti.oaqa.framework.eval.passage.PassageMAPEvalAggregator.java

/**
 * Computes the average passage-level MAP for retrieved passages against a
 * gold standard, using character-offset overlap on [begin, end) ranges.
 *
 * <p>Passages are scanned in rank order; each retrieved passage that
 * overlaps an unmatched gold passage with the same URI contributes a
 * precision term (cumulative overlap chars / cumulative retrieved chars).
 * Gold passages never matched contribute zero.
 *
 * @param docs the retrieved passages, in rank order
 * @param gs the gold-standard passages
 * @return the averaged precision, or 0 when there is no gold standard
 */
private float getAvgPsgMAP(List<Passage> docs, List<Passage> gs) {
    if (gs.isEmpty()) {
        return 0;
    }
    int totalChars = 0;
    int overlapLength = 0;
    float sumPrecision = 0;
    int count = 0;
    Set<Passage> foundGoldTriplets = Sets.newHashSet();
    for (Passage doc : docs) {
        Range<Integer> docRange = Range.closedOpen(doc.getBegin(), doc.getEnd());
        totalChars += docRange.upperEndpoint() - docRange.lowerEndpoint();
        for (Passage g : gs) {
            // Only passages from the same document can overlap.
            if (!g.getUri().equals(doc.getUri())) {
                continue;
            }
            Range<Integer> gRange = Range.closedOpen(g.getBegin(), g.getEnd());
            if (!docRange.isConnected(gRange)) {
                continue;
            }
            Range<Integer> overlap = docRange.intersection(gRange);
            // Adjacent ranges are connected but intersect in an empty range.
            if (overlap.isEmpty()) {
                continue;
            }
            overlapLength += overlap.upperEndpoint() - overlap.lowerEndpoint();
            sumPrecision += (float) overlapLength / (float) totalChars;
            count++;
            foundGoldTriplets.add(g);
            // Credit each retrieved passage against at most one gold passage.
            break;
        }
    }
    // Gold passages that were never matched contribute a precision of zero.
    int numZeros = Sets.difference(Sets.newHashSet(gs), foundGoldTriplets).size();
    return sumPrecision / (count + numZeros);
}