Example usage for com.google.common.collect Lists reverse

List of usage examples for com.google.common.collect Lists reverse

Introduction

On this page you can find example usages of com.google.common.collect Lists reverse.

Prototype

@CheckReturnValue
public static <T> List<T> reverse(List<T> list) 

Source Link

Document

Returns a reversed view of the specified list.

Usage

From source file:org.gitools.heatmap.HeatmapDimension.java

/**
 * Appends the details contributed by each visible header of this dimension
 * to the given list. For the COLUMNS dimension the headers are walked in
 * reverse order.
 */
public void populateDetails(List<DetailsDecoration> details) {
    boolean reversed = getId() == MatrixDimensionKey.COLUMNS;
    Iterable<HeatmapHeader> ordered = reversed ? Lists.reverse(headers) : headers;

    for (HeatmapHeader candidate : ordered) {
        if (!candidate.isVisible()) {
            continue;
        }
        candidate.populateDetails(details, getFocus(), getSelectedHeader() == candidate);
    }
}

From source file:org.elasticsearch.search.aggregations.bucket.histogram.AbstractHistogramBase.java

@Override
public InternalAggregation reduce(ReduceContext reduceContext) {
    List<InternalAggregation> aggregations = reduceContext.aggregations();
    if (aggregations.size() == 1) {

        // Single shard result: nothing to merge, but we may still need to
        // fill the histogram gaps with empty buckets.
        if (emptyBucketInfo == null) {
            return aggregations.get(0);
        }

        // we need to fill the gaps with empty buckets
        AbstractHistogramBase histo = (AbstractHistogramBase) aggregations.get(0);
        CollectionUtil.introSort(histo.buckets,
                order.asc ? InternalOrder.KEY_ASC.comparator() : InternalOrder.KEY_DESC.comparator());
        // Iterate in ascending key order regardless of the requested order;
        // Lists.reverse is a view, so insertions end up in histo.buckets too.
        List<HistogramBase.Bucket> list = order.asc ? histo.buckets : Lists.reverse(histo.buckets);
        HistogramBase.Bucket prevBucket = null;
        ListIterator<HistogramBase.Bucket> iter = list.listIterator();
        while (iter.hasNext()) {
            // look ahead on the next bucket without advancing the iter
            // so we'll be able to insert elements at the right position
            HistogramBase.Bucket nextBucket = list.get(iter.nextIndex());
            if (prevBucket != null) {
                // Insert an empty bucket for every rounding step missing
                // between the previous and the next existing bucket.
                long key = emptyBucketInfo.rounding.nextRoundingValue(prevBucket.getKey());
                while (key != nextBucket.getKey()) {
                    iter.add(createBucket(key, 0, emptyBucketInfo.subAggregations));
                    key = emptyBucketInfo.rounding.nextRoundingValue(key);
                }
            }
            prevBucket = iter.next();
        }

        // Re-sort only for non-key orders; key order was established above.
        if (order != InternalOrder.KEY_ASC && order != InternalOrder.KEY_DESC) {
            CollectionUtil.introSort(histo.buckets, order.comparator());
        }

        return histo;

    }

    AbstractHistogramBase reduced = (AbstractHistogramBase) aggregations.get(0);

    // Group the buckets of all shard results by key so that same-key buckets
    // can be reduced together.
    Recycler.V<LongObjectOpenHashMap<List<Bucket>>> bucketsByKey = reduceContext.cacheRecycler()
            .longObjectMap(-1);
    for (InternalAggregation aggregation : aggregations) {
        AbstractHistogramBase<B> histogram = (AbstractHistogramBase) aggregation;
        for (B bucket : histogram.buckets) {
            List<Bucket> bucketList = bucketsByKey.v().get(((Bucket) bucket).key);
            if (bucketList == null) {
                bucketList = new ArrayList<Bucket>(aggregations.size());
                bucketsByKey.v().put(((Bucket) bucket).key, bucketList);
            }
            bucketList.add((Bucket) bucket);
        }
    }

    // Reduce each key group into a single bucket.
    List<HistogramBase.Bucket> reducedBuckets = new ArrayList<HistogramBase.Bucket>(bucketsByKey.v().size());
    Object[] buckets = bucketsByKey.v().values;
    boolean[] allocated = bucketsByKey.v().allocated;
    for (int i = 0; i < allocated.length; i++) {
        if (allocated[i]) {
            Bucket bucket = ((List<Bucket>) buckets[i]).get(0).reduce(((List<Bucket>) buckets[i]),
                    reduceContext.cacheRecycler());
            reducedBuckets.add(bucket);
        }
    }
    bucketsByKey.release();

    // adding empty buckets if needed
    if (emptyBucketInfo != null) {
        // Same gap-filling pass as in the single-aggregation branch above.
        CollectionUtil.introSort(reducedBuckets,
                order.asc ? InternalOrder.KEY_ASC.comparator() : InternalOrder.KEY_DESC.comparator());
        List<HistogramBase.Bucket> list = order.asc ? reducedBuckets : Lists.reverse(reducedBuckets);
        HistogramBase.Bucket prevBucket = null;
        ListIterator<HistogramBase.Bucket> iter = list.listIterator();
        while (iter.hasNext()) {
            HistogramBase.Bucket nextBucket = list.get(iter.nextIndex());
            if (prevBucket != null) {
                long key = emptyBucketInfo.rounding.nextRoundingValue(prevBucket.getKey());
                while (key != nextBucket.getKey()) {
                    iter.add(createBucket(key, 0, emptyBucketInfo.subAggregations));
                    key = emptyBucketInfo.rounding.nextRoundingValue(key);
                }
            }
            prevBucket = iter.next();
        }

        if (order != InternalOrder.KEY_ASC && order != InternalOrder.KEY_DESC) {
            CollectionUtil.introSort(reducedBuckets, order.comparator());
        }

    } else {
        CollectionUtil.introSort(reducedBuckets, order.comparator());
    }

    reduced.buckets = reducedBuckets;
    return reduced;
}

From source file:org.sonatype.nexus.timeline.feeds.sources.ErrorWarningFeedSource.java

/**
 * Extracts ERROR and WARN log lines from the given log file. It returns an ordered
 * list (newest first, oldest last) of found log lines, and that list is capped at
 * {@code entriesToExtract} entries.
 *
 * @param logFile          the log file to scan.
 * @param entriesToExtract the number of "newest" entries that should be collected.
 */
private List<FeedEvent> extractEntriesFromLogfile(final File logFile, final int entriesToExtract)
        throws IOException {
    final List<FeedEvent> entries = Lists.newArrayList();
    Closer closer = Closer.create();
    try {
        // Fix: register the reader with the Closer so it is actually closed in the
        // finally block; previously it was created unregistered and leaked.
        final BufferedReader reader = closer.register(Files.newReader(logFile, Charset.forName("UTF-8")));
        String logLine = reader.readLine();
        while (logLine != null) {
            if (logLine.contains(" WARN ") || logLine.contains(" ERROR ")) {

                // FIXME: Grab the following stacktrace (if any) from the log; it would
                // need line separators converted to <br/> and tabs to &nbsp; so that
                // it displays properly on RSS.

                final FeedEvent entry = new FeedEvent("LOG", // ignored
                        "ERROR_WARNING", // ignored
                        new Date(), // TODO: timestamp from log file?
                        null, // author "system"
                        null, // no link (maybe log config? or support?)
                        Maps.<String, String>newHashMap());
                if (logLine.contains(" ERROR ")) {
                    entry.setTitle("Error");
                } else if (logLine.contains(" WARN ")) {
                    entry.setTitle("Warning");
                }
                entry.setContent(logLine);

                // Keep only the newest entriesToExtract lines: drop the oldest
                // entry once the cap is exceeded.
                entries.add(entry);
                if (entries.size() > entriesToExtract) {
                    entries.remove(0);
                }
            }
            logLine = reader.readLine();
        }
    } catch (Throwable e) {
        throw closer.rethrow(e);
    } finally {
        closer.close();
    }
    // Entries were appended oldest-to-newest; reverse so the newest comes first.
    return Lists.reverse(entries);
}

From source file:org.gitools.ui.app.heatmap.drawer.header.HeatmapHeaderDrawer.java

/**
 * Hit-tests the point against the stacked header drawers and returns the
 * header whose bounding box contains it, or {@code null} when none does.
 */
public HeatmapHeader getHeader(Point p) {
    int offsetX = 0;
    int offsetY = 0;

    if (isHorizontal()) {
        // Horizontal layout: boxes are stacked vertically, walked in reverse.
        for (AbstractHeatmapHeaderDrawer drawer : Lists.reverse(drawers)) {
            Dimension size = drawer.getSize();
            if (new Rectangle(offsetX, offsetY, size.width, size.height).contains(p)) {
                drawer.configure(p, offsetX, offsetY);
                return drawer.getHeader();
            }
            offsetY += size.height;
        }
        return null;
    }

    // Vertical layout: boxes are laid out left-to-right in drawer order.
    for (AbstractHeatmapHeaderDrawer drawer : drawers) {
        Dimension size = drawer.getSize();
        if (new Rectangle(offsetX, offsetY, size.width, size.height).contains(p)) {
            drawer.configure(p, offsetX, offsetY);
            return drawer.getHeader();
        }
        offsetX += size.width;
    }
    return null;
}

From source file:co.cask.cdap.logging.read.AvroFileReader.java

/**
 * Reads up to {@code maxEvents} log events that match {@code logFilter} and have a
 * timestamp not greater than {@code fromTimeMs}, scanning the Avro file backwards
 * in fixed-size chunks so the newest qualifying events are kept.
 */
public Collection<LogEvent> readLogPrev(Location file, Filter logFilter, long fromTimeMs, final int maxEvents) {
    try {
        DataFileReader<GenericRecord> dataFileReader = createReader(file);

        try {
            if (!dataFileReader.hasNext()) {
                return ImmutableList.of();
            }

            GenericRecord datum;
            // Each inner list holds the events of one chunk; chunks are collected
            // newest-to-oldest and reversed before returning.
            List<List<LogEvent>> logSegments = Lists.newArrayList();
            int count = 0;

            // Calculate skipLen based on fileLength
            long skipLen = file.length() / 10;
            if (skipLen > DEFAULT_SKIP_LEN) {
                skipLen = DEFAULT_SKIP_LEN;
            } else if (skipLen <= 0) {
                skipLen = DEFAULT_SKIP_LEN;
            }

            List<LogEvent> logSegment = Lists.newArrayList();

            long lastSeekPos;
            long seekPos = file.length();
            while (seekPos > 0) {
                lastSeekPos = seekPos;
                seekPos = seekPos < skipLen ? 0 : seekPos - skipLen;
                dataFileReader.sync(seekPos);

                // Reuse the list when the previous chunk contributed nothing.
                logSegment = logSegment.isEmpty() ? logSegment : Lists.<LogEvent>newArrayList();
                // read all the elements in the current segment (seekPos up to lastSeekPos)
                while (dataFileReader.hasNext() && !dataFileReader.pastSync(lastSeekPos)) {
                    datum = dataFileReader.next();

                    ILoggingEvent loggingEvent = LoggingEvent.decode(datum);

                    // Stop when reached fromTimeMs
                    if (loggingEvent.getTimeStamp() > fromTimeMs) {
                        break;
                    }

                    if (logFilter.match(loggingEvent)) {
                        ++count;
                        logSegment.add(new LogEvent(loggingEvent,
                                new LogOffset(LogOffset.INVALID_KAFKA_OFFSET, loggingEvent.getTimeStamp())));
                    }
                }

                if (!logSegment.isEmpty()) {
                    logSegments.add(logSegment);
                }

                if (count > maxEvents) {
                    break;
                }
            }

            // Restore chronological order, then skip the oldest surplus events so
            // that at most maxEvents remain.
            int skip = count >= maxEvents ? count - maxEvents : 0;
            return Lists.newArrayList(Iterables.skip(Iterables.concat(Lists.reverse(logSegments)), skip));
        } finally {
            try {
                dataFileReader.close();
            } catch (IOException e) {
                LOG.error(String.format("Got exception while closing log file %s", file.toURI()), e);
            }
        }
    } catch (Exception e) {
        LOG.error(String.format("Got exception while reading log file %s", file.toURI()), e);
        throw Throwables.propagate(e);
    }
}

From source file:co.cask.cdap.logging.read.AvroFileLogReader.java

/**
 * Reads up to {@code maxEvents} log events that match {@code logFilter} and have a
 * timestamp not greater than {@code fromTimeMs}, scanning the Avro file backwards
 * in fixed-size chunks so the newest qualifying events are kept.
 */
public Collection<LogEvent> readLogPrev(Location file, Filter logFilter, long fromTimeMs, final int maxEvents) {
    try {
        DataFileReader<GenericRecord> dataFileReader = createReader(file);

        try {
            if (!dataFileReader.hasNext()) {
                return ImmutableList.of();
            }

            GenericRecord datum;
            // Each inner list holds the events of one chunk; chunks are collected
            // newest-to-oldest and reversed before returning.
            List<List<LogEvent>> logSegments = Lists.newArrayList();
            int count = 0;

            // Calculate skipLen based on fileLength
            long skipLen = file.length() / 10;
            if (skipLen > DEFAULT_SKIP_LEN) {
                skipLen = DEFAULT_SKIP_LEN;
            } else if (skipLen <= 0) {
                skipLen = DEFAULT_SKIP_LEN;
            }

            List<LogEvent> logSegment = Lists.newArrayList();
            // Timestamp of the first event of the previously read (later) chunk;
            // the current chunk stops once it reaches this boundary.
            long boundaryTimeMs = Long.MAX_VALUE;

            long seekPos = file.length();
            while (seekPos > 0) {
                seekPos = seekPos < skipLen ? 0 : seekPos - skipLen;
                dataFileReader.sync(seekPos);

                // Reuse the list when the previous chunk contributed nothing.
                logSegment = logSegment.isEmpty() ? logSegment : Lists.<LogEvent>newArrayList();
                long segmentStartTimeMs = Long.MAX_VALUE;
                while (dataFileReader.hasNext()) {
                    datum = dataFileReader.next();

                    ILoggingEvent loggingEvent = LoggingEvent.decode(datum);

                    if (segmentStartTimeMs == Long.MAX_VALUE) {
                        segmentStartTimeMs = loggingEvent.getTimeStamp();
                    }

                    // Stop when reached fromTimeMs, or at the end of current segment.
                    if (loggingEvent.getTimeStamp() > fromTimeMs
                            || loggingEvent.getTimeStamp() >= boundaryTimeMs) {
                        break;
                    }

                    if (logFilter.match(loggingEvent)) {
                        ++count;
                        logSegment.add(new LogEvent(loggingEvent, loggingEvent.getTimeStamp()));
                    }
                }

                boundaryTimeMs = segmentStartTimeMs;

                if (!logSegment.isEmpty()) {
                    logSegments.add(logSegment);
                }

                if (count > maxEvents) {
                    break;
                }
            }

            // Restore chronological order, then skip the oldest surplus events so
            // that at most maxEvents remain.
            int skip = count >= maxEvents ? count - maxEvents : 0;
            return Lists.newArrayList(Iterables.skip(Iterables.concat(Lists.reverse(logSegments)), skip));
        } finally {
            try {
                dataFileReader.close();
            } catch (IOException e) {
                LOG.error(String.format("Got exception while closing log file %s", file.toURI()), e);
            }
        }
    } catch (Exception e) {
        LOG.error(String.format("Got exception while reading log file %s", file.toURI()), e);
        throw Throwables.propagate(e);
    }
}

From source file:org.jetbrains.jet.codegen.state.JetTypeMapper.java

/**
 * Collects the chain of containing declarations of the given namespace, from
 * the outermost non-root namespace down to the descriptor itself, stopping at
 * the root namespace (which is excluded from the result).
 */
@NotNull
public static List<DeclarationDescriptor> getPathWithoutRootNsAndModule(
        @NotNull NamespaceDescriptor descriptor) {
    List<DeclarationDescriptor> ancestors = new ArrayList<DeclarationDescriptor>();
    for (DeclarationDescriptor current = descriptor; ; current = current.getContainingDeclaration()) {
        if (current instanceof NamespaceDescriptor
                && DescriptorUtils.isRootNamespace((NamespaceDescriptor) current)) {
            // Collected innermost-first; reverse so the path reads outermost-first.
            return Lists.reverse(ancestors);
        }
        ancestors.add(current);
        assert current != null : "Namespace must have a parent: " + descriptor;
    }
}

From source file:de.sep2011.funckit.model.graphmodel.implementations.commands.CreateWirePathCommand.java

/**
 * Connects {@code source} to {@code target} through the chain of IdPoints in
 * {@code idPointPath}: ensures the path starts at a non-IdPoint endpoint
 * (swapping endpoints and reversing the path if needed), adds the IdPoints to
 * the circuit, then wires source -> first IdPoint, the IdPoints to each other,
 * and the last IdPoint -> target.
 */
private void doPathConnect() {
    AccessPoint srcAp = this.source;
    AccessPoint tarAp = this.target;
    List<IdPoint> locIdPointPath = this.idPointPath;

    /* Make sure that we start on non IDPoint */
    if (srcAp.getBrick() instanceof IdPoint) {
        AccessPoint tmpFst = srcAp;
        srcAp = tarAp;
        tarAp = tmpFst;
        // The path must be traversed from the new source's side as well.
        locIdPointPath = Lists.reverse(locIdPointPath);
    }

    /* Add path IdPoints */
    for (IdPoint idp : locIdPointPath) {
        Command addBrickCmd = new AddBrickCommand(circuit, idp);
        addBrickCmd.setNotifyObserversHint(false);
        getDispatcher().dispatch(addBrickCmd);
    }

    /* connect first AccessPoint to first IdPoint */

    // An Output source feeds the IdPoint's input; an Input source is fed by
    // the IdPoint's output.
    AccessPoint connectAP1 = null;
    if (srcAp instanceof Output) {
        connectAP1 = locIdPointPath.get(0).getInputA();
    } else if (srcAp instanceof Input) {
        connectAP1 = locIdPointPath.get(0).getOutputO();
    }
    assert connectAP1 != null;
    Command connectCmd = new BareConnectCommand(circuit, srcAp, connectAP1);
    connectCmd.setNotifyObserversHint(false);
    getDispatcher().dispatch(connectCmd);

    /* connect the IdPoints */
    for (int i = 0; i < locIdPointPath.size() - 1; i++) {
        AccessPoint fstAp = null;
        AccessPoint sndAp = null;

        // Wire direction along the chain depends on whether the source is an
        // Output (forward) or an Input (backward).
        if (srcAp instanceof Output) {
            fstAp = locIdPointPath.get(i).getOutputO();
            sndAp = locIdPointPath.get(i + 1).getInputA();
        } else if (srcAp instanceof Input) {
            fstAp = locIdPointPath.get(i).getInputA();
            sndAp = locIdPointPath.get(i + 1).getOutputO();
        }

        assert fstAp != null && sndAp != null;

        Command idConnectCmd = new BareConnectCommand(circuit, fstAp, sndAp);
        idConnectCmd.setNotifyObserversHint(false);
        getDispatcher().dispatch(idConnectCmd);
    }

    /* connect second AccessPoint to last IdPoint */
    // When the target itself sits on an IdPoint, pick the complementary
    // access point on that IdPoint instead of the original one.
    AccessPoint connectAP2 = null;
    if (tarAp instanceof Output && !(tarAp.getBrick() instanceof IdPoint)) {
        connectAP2 = locIdPointPath.get(locIdPointPath.size() - 1).getInputA();
    } else if (tarAp instanceof Input && !(tarAp.getBrick() instanceof IdPoint)) {
        connectAP2 = locIdPointPath.get(locIdPointPath.size() - 1).getOutputO();
    } else if (tarAp.getBrick() instanceof IdPoint && srcAp instanceof Input) {
        connectAP2 = locIdPointPath.get(locIdPointPath.size() - 1).getInputA();
        tarAp = ((IdPoint) tarAp.getBrick()).getOutputO();
    } else if (tarAp.getBrick() instanceof IdPoint && srcAp instanceof Output) {
        connectAP2 = locIdPointPath.get(locIdPointPath.size() - 1).getOutputO();
        tarAp = ((IdPoint) tarAp.getBrick()).getInputA();

    }
    assert connectAP2 != null;
    getDispatcher().dispatch(new BareConnectCommand(circuit, connectAP2, tarAp).setNotifyObserversHint(false));

}

From source file:Contextual.Stemmer.Context.java

/**
 * ECS loop for restoring suffixes (Indonesian: "Loop Pengembalian Akhiran").
 * Re-attaches previously removed suffixes one at a time (newest removal first),
 * re-runs prefix removal, and stops as soon as the resulting word is found in
 * the dictionary.
 */
public void loopPengembalianAkhiran() {
    // restore prefix to form [DP+[DP+[DP]]] + Root word
    this.restorePrefix();

    // NOTE(review): if there's a problem, try to check if the original list is
    // also reversed.
    final ImmutableList<RemovalInterface> _removals = ImmutableList.copyOf(removals);
    List<RemovalInterface> reversedRemovals = Lists.reverse(_removals);
    final String _currentWord = this.getCurrentWord();

    for (RemovalInterface removal : reversedRemovals) {
        if (!this.isSuffixRemoval(removal))
            continue;

        // For "kan" removals, first retry with only "k" re-attached, then
        // fall back to re-attaching the full "kan".
        if (removal.getRemovedPart().equals("kan")) {
            this.setCurrentWord(removal.getResult().concat("k"));

            // step 4, 5
            this.removePrefixes();
            if (this.dictionary.contains((this.getCurrentWord())))
                return;

            this.setCurrentWord(removal.getResult().concat("kan"));
        } else
            this.setCurrentWord(removal.getSubject());

        // step 4, 5
        this.removePrefixes();
        if (this.dictionary.contains(this.getCurrentWord()))
            return;
        // No dictionary match: restore the pre-loop state before trying the
        // next removal.
        this.removals = _removals;
        this.setCurrentWord(_currentWord);
    }
}

From source file:yaphyre.app.scenereader.simple.SimpleSceneFileReader.java

/**
 * Maps a parsed geometry description to a concrete {@link Shape}, resolving
 * its shader (inline or by reference) and its transformation (by reference,
 * or by folding the listed transformation steps in reverse order).
 *
 * @throws RuntimeException if the geometry type is not recognized.
 */
@Nonnull
private Shape mapGeometry(@Nonnull GeometryBase geometryBase) {

    final String geometryTypeName = geometryBase.getClass().getSimpleName();

    final NamedType shaderSource = Optional.ofNullable(geometryBase.getShader())
            .<NamedType>map(GeometryBase.Shader::getColorShader) // Cast no longer needed when more than one shader type is known
            .orElseGet(() -> ((NamedType) geometryBase.getShaderRef()));
    final Shader shader = mapShader(shaderSource);

    final Transformation transformation = Optional.ofNullable(geometryBase.getTransformationRef())
            .map(ref -> mapTransformation((TransformationBase) ref))
            .orElseGet(() -> Lists.reverse(geometryBase.getTransformation().getIdentityOrScaleOrTranslate())
                    .stream().map(this::mapTransformation)
                    .reduce(Transformation.IDENTITY, Transformation::mul));

    if ("SimpleSphere".equals(geometryTypeName)) {
        return new SimpleSphere(transformation, shader);
    }
    if ("Plane".equals(geometryTypeName)) {
        return new Plane(transformation, shader);
    }

    final String errorMessage = "Unknown geometry type: '" + geometryTypeName + "'";
    LOGGER.error(errorMessage);
    throw new RuntimeException(errorMessage);
}