Example usage for org.joda.time Interval getStartMillis

Introduction

On this page you can find example usages of the org.joda.time Interval method getStartMillis().

Prototype

public long getStartMillis() 

Document

Gets the start of this time interval, which is inclusive.
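
Below is a minimal, self-contained sketch (not drawn from the projects under Usage; the interval bounds are arbitrary illustration values) showing what getStartMillis() returns relative to the interval's endpoints:

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class GetStartMillisExample {
    public static void main(String[] args) {
        // A Joda-Time Interval includes its start instant and excludes its end instant.
        Interval interval = new Interval(new DateTime(2013, 1, 1, 0, 0), new DateTime(2013, 1, 2, 0, 0));

        long startMillis = interval.getStartMillis(); // inclusive start, as millis since the epoch
        long endMillis = interval.getEndMillis();     // exclusive end, as millis since the epoch

        System.out.println("start=" + startMillis + " durationMillis=" + (endMillis - startMillis));
    }
}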

Usage

From source file:com.metamx.druid.client.CachingClusteredClient.java

License:Open Source License

private Cache.NamedKey computeSegmentCacheKey(String segmentIdentifier, SegmentDescriptor descriptor,
        byte[] queryCacheKey) {
    final Interval segmentQueryInterval = descriptor.getInterval();
    final byte[] versionBytes = descriptor.getVersion().getBytes();

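    // Key layout: 8-byte interval start millis + 8-byte interval end millis + version bytes + 4-byte partition number + query cache key.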
    return new Cache.NamedKey(segmentIdentifier,
            ByteBuffer.allocate(16 + versionBytes.length + 4 + queryCacheKey.length)
                    .putLong(segmentQueryInterval.getStartMillis()).putLong(segmentQueryInterval.getEndMillis())
                    .put(versionBytes).putInt(descriptor.getPartitionNumber()).put(queryCacheKey).array());
}

From source file:com.metamx.druid.index.v1.IncrementalIndexStorageAdapter.java

License:Open Source License

@Override
public Iterable<Cursor> makeCursors(final Filter filter, final Interval interval, final QueryGranularity gran) {
    Interval actualIntervalTmp = interval;
    Interval dataInterval = getInterval();
    if (!actualIntervalTmp.overlaps(dataInterval)) {
        return ImmutableList.of();
    }

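    // Clamp the query interval to the interval actually covered by the data.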
    if (actualIntervalTmp.getStart().isBefore(dataInterval.getStart())) {
        actualIntervalTmp = actualIntervalTmp.withStart(dataInterval.getStart());
    }
    if (actualIntervalTmp.getEnd().isAfter(dataInterval.getEnd())) {
        actualIntervalTmp = actualIntervalTmp.withEnd(dataInterval.getEnd());
    }

    final Interval actualInterval = actualIntervalTmp;

    return new Iterable<Cursor>() {
        @Override
        public Iterator<Cursor> iterator() {
            return FunctionalIterator.create(
                    gran.iterable(actualInterval.getStartMillis(), actualInterval.getEndMillis()).iterator())
                    .transform(new Function<Long, Cursor>() {
                        EntryHolder currEntry = new EntryHolder();
                        private final ValueMatcher filterMatcher;

                        {
                            filterMatcher = makeFilterMatcher(filter, currEntry);
                        }

                        @Override
                        public Cursor apply(@Nullable final Long input) {
                            final long timeStart = Math.max(input, actualInterval.getStartMillis());

                            return new Cursor() {
                                private Iterator<Map.Entry<IncrementalIndex.TimeAndDims, Aggregator[]>> baseIter;
                                private ConcurrentNavigableMap<IncrementalIndex.TimeAndDims, Aggregator[]> cursorMap;
                                final DateTime time;
                                int numAdvanced = -1;
                                boolean done;

                                {
                                    cursorMap = index.getSubMap(
                                            new IncrementalIndex.TimeAndDims(timeStart, new String[][] {}),
                                            new IncrementalIndex.TimeAndDims(Math
                                                    .min(actualInterval.getEndMillis(), gran.next(timeStart)),
                                                    new String[][] {}));
                                    time = gran.toDateTime(input);

                                    reset();
                                }

                                @Override
                                public DateTime getTime() {
                                    return time;
                                }

                                @Override
                                public void advance() {
                                    if (!baseIter.hasNext()) {
                                        done = true;
                                        return;
                                    }

                                    while (baseIter.hasNext()) {
                                        currEntry.set(baseIter.next());

                                        if (filterMatcher.matches()) {
                                            return;
                                        }
                                    }

                                    if (!filterMatcher.matches()) {
                                        done = true;
                                    }
                                }

                                @Override
                                public boolean isDone() {
                                    return done;
                                }

                                @Override
                                public void reset() {
                                    baseIter = cursorMap.entrySet().iterator();

                                    if (numAdvanced == -1) {
                                        numAdvanced = 0;
                                        while (baseIter.hasNext()) {
                                            currEntry.set(baseIter.next());
                                            if (filterMatcher.matches()) {
                                                return;
                                            }

                                            numAdvanced++;
                                        }
                                    } else {
                                        Iterators.skip(baseIter, numAdvanced);
                                        if (baseIter.hasNext()) {
                                            currEntry.set(baseIter.next());
                                        }
                                    }

                                    done = cursorMap.size() == 0 || !baseIter.hasNext();
                                }

                                @Override
                                public DimensionSelector makeDimensionSelector(String dimension) {
                                    final String dimensionName = dimension.toLowerCase();
                                    final IncrementalIndex.DimDim dimValLookup = index
                                            .getDimension(dimensionName);
                                    if (dimValLookup == null) {
                                        return null;
                                    }

                                    final int maxId = dimValLookup.size();
                                    final int dimIndex = index.getDimensionIndex(dimensionName);

                                    return new DimensionSelector() {
                                        @Override
                                        public IndexedInts getRow() {
                                            final ArrayList<Integer> vals = Lists.newArrayList();
                                            if (dimIndex < currEntry.getKey().getDims().length) {
                                                final String[] dimVals = currEntry.getKey().getDims()[dimIndex];
                                                if (dimVals != null) {
                                                    for (String dimVal : dimVals) {
                                                        int id = dimValLookup.getId(dimVal);
                                                        if (id < maxId) {
                                                            vals.add(id);
                                                        }
                                                    }
                                                }
                                            }

                                            return new IndexedInts() {
                                                @Override
                                                public int size() {
                                                    return vals.size();
                                                }

                                                @Override
                                                public int get(int index) {
                                                    return vals.get(index);
                                                }

                                                @Override
                                                public Iterator<Integer> iterator() {
                                                    return vals.iterator();
                                                }
                                            };
                                        }

                                        @Override
                                        public int getValueCardinality() {
                                            return dimValLookup.size();
                                        }

                                        @Override
                                        public String lookupName(int id) {
                                            return dimValLookup.getValue(id);
                                        }

                                        @Override
                                        public int lookupId(String name) {
                                            return dimValLookup.getId(name);
                                        }
                                    };
                                }

                                @Override
                                public FloatMetricSelector makeFloatMetricSelector(String metric) {
                                    final String metricName = metric.toLowerCase();
                                    final Integer metricIndexInt = index.getMetricIndex(metricName);
                                    if (metricIndexInt == null) {
                                        return new FloatMetricSelector() {
                                            @Override
                                            public float get() {
                                                return 0.0f;
                                            }
                                        };
                                    }

                                    final int metricIndex = metricIndexInt;

                                    return new FloatMetricSelector() {
                                        @Override
                                        public float get() {
                                            return currEntry.getValue()[metricIndex].getFloat();
                                        }
                                    };
                                }

                                @Override
                                public ComplexMetricSelector makeComplexMetricSelector(String metric) {
                                    final String metricName = metric.toLowerCase();
                                    final Integer metricIndexInt = index.getMetricIndex(metricName);
                                    if (metricIndexInt == null) {
                                        return null;
                                    }

                                    final int metricIndex = metricIndexInt;

                                    final ComplexMetricSerde serde = ComplexMetrics
                                            .getSerdeForType(index.getMetricType(metricName));

                                    return new ComplexMetricSelector() {
                                        @Override
                                        public Class classOfObject() {
                                            return serde.getObjectStrategy().getClazz();
                                        }

                                        @Override
                                        public Object get() {
                                            return currEntry.getValue()[metricIndex].get();
                                        }
                                    };
                                }
                            };
                        }
                    });
        }
    };
}

From source file:com.metamx.druid.index.v1.IncrementalIndexStorageAdapter.java

License:Open Source License

@Override
public Iterable<SearchHit> searchDimensions(final SearchQuery query, final Filter filter) {
    final List<String> dimensions = query.getDimensions();
    final int[] dimensionIndexes;
    final String[] dimensionNames;
    final List<String> dimensionOrder = index.getDimensions();
    if (dimensions == null || dimensions.isEmpty()) {
        dimensionIndexes = new int[dimensionOrder.size()];
        dimensionNames = new String[dimensionIndexes.length];

        Iterator<String> dimensionOrderIter = dimensionOrder.iterator();
        for (int i = 0; i < dimensionIndexes.length; ++i) {
            dimensionNames[i] = dimensionOrderIter.next();
            dimensionIndexes[i] = index.getDimensionIndex(dimensionNames[i]);
        }
    } else {
        int[] tmpDimensionIndexes = new int[dimensions.size()];
        String[] tmpDimensionNames = new String[dimensions.size()];
        int i = 0;
        for (String dimension : dimensions) {
            Integer dimIndex = index.getDimensionIndex(dimension.toLowerCase());
            if (dimIndex != null) {
                tmpDimensionNames[i] = dimension;
                tmpDimensionIndexes[i] = dimIndex;
                ++i;
            }
        }

        if (i != tmpDimensionIndexes.length) {
            dimensionIndexes = new int[i];
            dimensionNames = new String[i];
            System.arraycopy(tmpDimensionIndexes, 0, dimensionIndexes, 0, i);
            System.arraycopy(tmpDimensionNames, 0, dimensionNames, 0, i);
        } else {
            dimensionIndexes = tmpDimensionIndexes;
            dimensionNames = tmpDimensionNames;
        }
    }

    final List<Interval> queryIntervals = query.getIntervals();
    if (queryIntervals.size() != 1) {
        throw new IAE("Can only handle one interval, got query[%s]", query);
    }

    final Interval queryInterval = queryIntervals.get(0);
    final long intervalStart = queryInterval.getStartMillis();
    final long intervalEnd = queryInterval.getEndMillis();

    final EntryHolder holder = new EntryHolder();
    final ValueMatcher theMatcher = makeFilterMatcher(filter, holder);
    final SearchQuerySpec searchQuerySpec = query.getQuery();
    final TreeSet<SearchHit> retVal = Sets.newTreeSet(query.getSort().getComparator());

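    // Restrict the scan to facts whose timestamps fall within [intervalStart, intervalEnd).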
    ConcurrentNavigableMap<IncrementalIndex.TimeAndDims, Aggregator[]> facts = index.getSubMap(
            new IncrementalIndex.TimeAndDims(intervalStart, new String[][] {}),
            new IncrementalIndex.TimeAndDims(intervalEnd, new String[][] {}));

    for (Map.Entry<IncrementalIndex.TimeAndDims, Aggregator[]> entry : facts.entrySet()) {
        holder.set(entry);
        final IncrementalIndex.TimeAndDims key = holder.getKey();
        final long timestamp = key.getTimestamp();

        if (timestamp >= intervalStart && timestamp < intervalEnd && theMatcher.matches()) {
            final String[][] dims = key.getDims();

            for (int i = 0; i < dimensionIndexes.length; ++i) {
                if (dimensionIndexes[i] < dims.length) {
                    final String[] dimVals = dims[dimensionIndexes[i]];
                    if (dimVals != null) {
                        for (int j = 0; j < dimVals.length; ++j) {
                            if (searchQuerySpec.accept(dimVals[j])) {
                                retVal.add(new SearchHit(dimensionNames[i], dimVals[j]));
                            }
                        }
                    }
                }
            }
        }
    }

    return new FunctionalIterable<SearchHit>(retVal).limit(query.getLimit());
}

From source file:com.metamx.druid.index.v1.IndexStorageAdapter.java

License:Open Source License

@Override
public Iterable<Cursor> makeCursors(final Filter filter, final Interval interval, final QueryGranularity gran) {
    Interval actualIntervalTmp = interval;
    if (!actualIntervalTmp.overlaps(index.dataInterval)) {
        return ImmutableList.of();
    }

    if (actualIntervalTmp.getStart().isBefore(index.dataInterval.getStart())) {
        actualIntervalTmp = actualIntervalTmp.withStart(index.dataInterval.getStart());
    }
    if (actualIntervalTmp.getEnd().isAfter(index.dataInterval.getEnd())) {
        actualIntervalTmp = actualIntervalTmp.withEnd(index.dataInterval.getEnd());
    }

    final Interval actualInterval = actualIntervalTmp;

    final Pair<Integer, Integer> intervalStartAndEnd = computeTimeStartEnd(actualInterval);

    return new Iterable<Cursor>() {
        @Override
        public Iterator<Cursor> iterator() {
            final Offset baseOffset;
            if (filter == null) {
                baseOffset = new ArrayBasedOffset(ids, intervalStartAndEnd.lhs);
            } else {
                baseOffset = new StartLimitedOffset(
                        new ConciseOffset(filter.goConcise(new IndexBasedBitmapIndexSelector(index))),
                        intervalStartAndEnd.lhs);
            }

            final Map<String, Object> metricHolderCache = Maps.newHashMap();

            // This after() call is not perfect: if there is an exception during processing it will never run,
            // but it's better than nothing, and doing this properly all the time would require a lot more work.
            return MoreIterators.after(FunctionalIterator.create(
                    gran.iterable(actualInterval.getStartMillis(), actualInterval.getEndMillis()).iterator())
                    .keep(new Function<Long, Cursor>() {
                        @Override
                        public Cursor apply(final Long intervalStart) {
                            final Offset offset = new TimestampCheckingOffset(baseOffset, index.timeOffsets,
                                    Math.min(actualInterval.getEndMillis(), gran.next(intervalStart)));

                            return new Cursor() {

                                private final Offset initOffset = offset.clone();
                                private Offset cursorOffset = offset;
                                private final DateTime timestamp = gran.toDateTime(intervalStart);

                                @Override
                                public DateTime getTime() {
                                    return timestamp;
                                }

                                @Override
                                public void advance() {
                                    cursorOffset.increment();
                                }

                                @Override
                                public boolean isDone() {
                                    return !cursorOffset.withinBounds();
                                }

                                @Override
                                public void reset() {
                                    cursorOffset = initOffset.clone();
                                }

                                @Override
                                public DimensionSelector makeDimensionSelector(String dimension) {
                                    final String dimensionName = dimension.toLowerCase();
                                    final String[] nameLookup = index.reverseDimLookup.get(dimensionName);
                                    if (nameLookup == null) {
                                        return null;
                                    }

                                    return new DimensionSelector() {
                                        final Map<String, Integer> dimValLookup = index.dimIdLookup
                                                .get(dimensionName);
                                        final DimensionColumn dimColumn = index.dimensionValues
                                                .get(dimensionName);
                                        final int[][] dimensionExpansions = dimColumn.getDimensionExpansions();
                                        final int[] dimensionRowValues = dimColumn.getDimensionRowValues();

                                        @Override
                                        public IndexedInts getRow() {
                                            return new ArrayBasedIndexedInts(
                                                    dimensionExpansions[dimensionRowValues[cursorOffset
                                                            .getOffset()]]);
                                        }

                                        @Override
                                        public int getValueCardinality() {
                                            return nameLookup.length;
                                        }

                                        @Override
                                        public String lookupName(int id) {
                                            return nameLookup[id];
                                        }

                                        @Override
                                        public int lookupId(String name) {
                                            final Integer retVal = dimValLookup.get(name);

                                            return retVal == null ? -1 : retVal;
                                        }
                                    };
                                }

                                @Override
                                public FloatMetricSelector makeFloatMetricSelector(String metric) {
                                    String metricName = metric.toLowerCase();
                                    IndexedFloats cachedFloats = (IndexedFloats) metricHolderCache.get(metric);
                                    if (cachedFloats == null) {
                                        MetricHolder holder = index.metricVals.get(metricName);
                                        if (holder == null) {
                                            return new FloatMetricSelector() {
                                                @Override
                                                public float get() {
                                                    return 0.0f;
                                                }
                                            };
                                        }

                                        cachedFloats = holder.getFloatType();
                                        metricHolderCache.put(metricName, cachedFloats);
                                    }

                                    final IndexedFloats metricVals = cachedFloats;
                                    return new FloatMetricSelector() {
                                        @Override
                                        public float get() {
                                            return metricVals.get(cursorOffset.getOffset());
                                        }
                                    };
                                }

                                @Override
                                public ComplexMetricSelector makeComplexMetricSelector(String metric) {
                                    final String metricName = metric.toLowerCase();
                                    Indexed cachedComplex = (Indexed) metricHolderCache.get(metricName);
                                    if (cachedComplex == null) {
                                        MetricHolder holder = index.metricVals.get(metricName);
                                        if (holder != null) {
                                            cachedComplex = holder.getComplexType();
                                            metricHolderCache.put(metricName, cachedComplex);
                                        }
                                    }

                                    if (cachedComplex == null) {
                                        return null;
                                    }

                                    final Indexed vals = cachedComplex;
                                    return new ComplexMetricSelector() {
                                        @Override
                                        public Class classOfObject() {
                                            return vals.getClazz();
                                        }

                                        @Override
                                        public Object get() {
                                            return vals.get(cursorOffset.getOffset());
                                        }
                                    };
                                }
                            };
                        }
                    }), new Runnable() {
                        @Override
                        public void run() {
                            for (Object object : metricHolderCache.values()) {
                                if (object instanceof Closeable) {
                                    Closeables.closeQuietly((Closeable) object);
                                }
                            }
                        }
                    });
        }
    };
}

From source file:com.metamx.druid.query.IntervalChunkingQueryRunner.java

License:Open Source License

private Iterable<Interval> splitInterval(Interval interval) {
    if (interval.getEndMillis() == interval.getStartMillis()) {
        return Lists.newArrayList(interval);
    }

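    // Walk the period-granularity boundaries across the interval, emitting one chunk per period.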
    List<Interval> intervals = Lists.newArrayList();
    Iterator<Long> timestamps = new PeriodGranularity(period, null, null)
            .iterable(interval.getStartMillis(), interval.getEndMillis()).iterator();

    long start = Math.max(timestamps.next(), interval.getStartMillis());
    while (timestamps.hasNext()) {
        long end = timestamps.next();
        intervals.add(new Interval(start, end));
        start = end;
    }

    if (start < interval.getEndMillis()) {
        intervals.add(new Interval(start, interval.getEndMillis()));
    }

    return intervals;
}

From source file:com.metamx.druid.realtime.plumber.RealtimePlumberSchool.java

License:Open Source License

@Override
public Plumber findPlumber(final Schema schema, final FireDepartmentMetrics metrics) {
    verifyState();

    final RejectionPolicy rejectionPolicy = rejectionPolicyFactory.create(windowPeriod);
    log.info("Creating plumber using rejectionPolicy[%s]", rejectionPolicy);

    return new Plumber() {
        private volatile boolean stopped = false;
        private volatile ExecutorService persistExecutor = null;
        private volatile ScheduledExecutorService scheduledExecutor = null;

        private final Map<Long, Sink> sinks = Maps.newConcurrentMap();
        private final VersionedIntervalTimeline<String, Sink> sinkTimeline = new VersionedIntervalTimeline<String, Sink>(
                String.CASE_INSENSITIVE_ORDER);

        @Override
        public void startJob() {
            computeBaseDir(schema).mkdirs();
            initializeExecutors();
            bootstrapSinksFromDisk();
            registerServerViewCallback();
            startPersistThread();
        }

        @Override
        public Sink getSink(long timestamp) {
            if (!rejectionPolicy.accept(timestamp)) {
                return null;
            }

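            // Sinks are keyed by the timestamp truncated to segment granularity.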
            final long truncatedTime = segmentGranularity.truncate(timestamp);

            Sink retVal = sinks.get(truncatedTime);

            if (retVal == null) {
                final Interval sinkInterval = new Interval(new DateTime(truncatedTime),
                        segmentGranularity.increment(new DateTime(truncatedTime)));

                retVal = new Sink(sinkInterval, schema, versioningPolicy.getVersion(sinkInterval));

                try {
                    segmentAnnouncer.announceSegment(retVal.getSegment());
                    sinks.put(truncatedTime, retVal);
                    sinkTimeline.add(retVal.getInterval(), retVal.getVersion(),
                            new SingleElementPartitionChunk<Sink>(retVal));
                } catch (IOException e) {
                    log.makeAlert(e, "Failed to announce new segment[%s]", schema.getDataSource())
                            .addData("interval", retVal.getInterval()).emit();
                }
            }

            return retVal;
        }

        @Override
        public <T> QueryRunner<T> getQueryRunner(final Query<T> query) {
            final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
            final QueryToolChest<T, Query<T>> toolchest = factory.getToolchest();

            final Function<Query<T>, ServiceMetricEvent.Builder> builderFn = new Function<Query<T>, ServiceMetricEvent.Builder>() {

                @Override
                public ServiceMetricEvent.Builder apply(@Nullable Query<T> input) {
                    return toolchest.makeMetricBuilder(query);
                }
            };

            List<TimelineObjectHolder<String, Sink>> querySinks = Lists.newArrayList();
            for (Interval interval : query.getIntervals()) {
                querySinks.addAll(sinkTimeline.lookup(interval));
            }

            return toolchest.mergeResults(factory.mergeRunners(EXEC, FunctionalIterable.create(querySinks)
                    .transform(new Function<TimelineObjectHolder<String, Sink>, QueryRunner<T>>() {
                        @Override
                        public QueryRunner<T> apply(TimelineObjectHolder<String, Sink> holder) {
                            final Sink theSink = holder.getObject().getChunk(0).getObject();
                            return new SpecificSegmentQueryRunner<T>(new MetricsEmittingQueryRunner<T>(emitter,
                                    builderFn, factory.mergeRunners(EXEC, Iterables.transform(theSink,
                                            new Function<FireHydrant, QueryRunner<T>>() {
                                                @Override
                                                public QueryRunner<T> apply(FireHydrant input) {
                                                    return factory.createRunner(input.getSegment());
                                                }
                                            }))),
                                    new SpecificSegmentSpec(new SegmentDescriptor(holder.getInterval(),
                                            theSink.getSegment().getVersion(),
                                            theSink.getSegment().getShardSpec().getPartitionNum())));
                        }
                    })));
        }

        @Override
        public void persist(final Runnable commitRunnable) {
            final List<Pair<FireHydrant, Interval>> indexesToPersist = Lists.newArrayList();
            for (Sink sink : sinks.values()) {
                if (sink.swappable()) {
                    indexesToPersist.add(Pair.of(sink.swap(), sink.getInterval()));
                }
            }

            log.info("Submitting persist runnable for dataSource[%s]", schema.getDataSource());

            persistExecutor.execute(new ThreadRenamingRunnable(
                    String.format("%s-incremental-persist", schema.getDataSource())) {
                @Override
                public void doRun() {
                    for (Pair<FireHydrant, Interval> pair : indexesToPersist) {
                        metrics.incrementRowOutputCount(persistHydrant(pair.lhs, schema, pair.rhs));
                    }
                    commitRunnable.run();
                }
            });
        }

        // Submits persist-n-merge task for a Sink to the persistExecutor
        private void persistAndMerge(final long truncatedTime, final Sink sink) {
            final String threadName = String.format("%s-%s-persist-n-merge", schema.getDataSource(),
                    new DateTime(truncatedTime));
            persistExecutor.execute(new ThreadRenamingRunnable(threadName) {
                @Override
                public void doRun() {
                    final Interval interval = sink.getInterval();

                    for (FireHydrant hydrant : sink) {
                        if (!hydrant.hasSwapped()) {
                            log.info("Hydrant[%s] hasn't swapped yet, swapping. Sink[%s]", hydrant, sink);
                            final int rowCount = persistHydrant(hydrant, schema, interval);
                            metrics.incrementRowOutputCount(rowCount);
                        }
                    }

                    final File mergedTarget = new File(computePersistDir(schema, interval), "merged");
                    if (mergedTarget.exists()) {
                        log.info("Skipping already-merged sink: %s", sink);
                        return;
                    }

                    File mergedFile = null;
                    try {
                        List<QueryableIndex> indexes = Lists.newArrayList();
                        for (FireHydrant fireHydrant : sink) {
                            Segment segment = fireHydrant.getSegment();
                            final QueryableIndex queryableIndex = segment.asQueryableIndex();
                            log.info("Adding hydrant[%s]", fireHydrant);
                            indexes.add(queryableIndex);
                        }

                        mergedFile = IndexMerger.mergeQueryableIndex(indexes, schema.getAggregators(),
                                mergedTarget);

                        QueryableIndex index = IndexIO.loadIndex(mergedFile);

                        DataSegment segment = dataSegmentPusher.push(mergedFile, sink.getSegment()
                                .withDimensions(Lists.newArrayList(index.getAvailableDimensions())));

                        segmentPublisher.publishSegment(segment);
                    } catch (IOException e) {
                        log.makeAlert(e, "Failed to persist merged index[%s]", schema.getDataSource())
                                .addData("interval", interval).emit();
                    }

                    if (mergedFile != null) {
                        try {
                            log.info("Deleting Index File[%s]", mergedFile);
                            FileUtils.deleteDirectory(mergedFile);
                        } catch (IOException e) {
                            log.warn(e, "Error deleting directory[%s]", mergedFile);
                        }
                    }
                }
            });
        }

        @Override
        public void finishJob() {
            log.info("Shutting down...");

            for (final Map.Entry<Long, Sink> entry : sinks.entrySet()) {
                persistAndMerge(entry.getKey(), entry.getValue());
            }

            while (!sinks.isEmpty()) {
                try {
                    log.info("Cannot shut down yet! Sinks remaining: %s", Joiner.on(", ")
                            .join(Iterables.transform(sinks.values(), new Function<Sink, String>() {
                                @Override
                                public String apply(Sink input) {
                                    return input.getSegment().getIdentifier();
                                }
                            })));

                    synchronized (handoffCondition) {
                        while (!sinks.isEmpty()) {
                            handoffCondition.wait();
                        }
                    }
                } catch (InterruptedException e) {
                    throw Throwables.propagate(e);
                }
            }

            // scheduledExecutor is shut down here, but persistExecutor is shut down when the
            // ServerView sends it a new-segment callback
            if (scheduledExecutor != null) {
                scheduledExecutor.shutdown();
            }

            stopped = true;
        }

        private void initializeExecutors() {
            if (persistExecutor == null) {
                persistExecutor = Executors.newFixedThreadPool(1,
                        new ThreadFactoryBuilder().setDaemon(true).setNameFormat("plumber_persist_%d").build());
            }
            if (scheduledExecutor == null) {
                scheduledExecutor = Executors.newScheduledThreadPool(1, new ThreadFactoryBuilder()
                        .setDaemon(true).setNameFormat("plumber_scheduled_%d").build());
            }
        }

        private void bootstrapSinksFromDisk() {
            for (File sinkDir : computeBaseDir(schema).listFiles()) {
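                // Each sink directory name encodes its interval in ISO8601 "start/end" form, with '/' replaced by '_'.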
                Interval sinkInterval = new Interval(sinkDir.getName().replace("_", "/"));

                // List only numerically-named segment dirs, to avoid reading and listing the "merged" dir.
                final File[] sinkFiles = sinkDir.listFiles(new FilenameFilter() {
                    @Override
                    public boolean accept(File dir, String fileName) {
                        return Ints.tryParse(fileName) != null;
                    }
                });
                Arrays.sort(sinkFiles, new Comparator<File>() {
                    @Override
                    public int compare(File o1, File o2) {
                        try {
                            return Ints.compare(Integer.parseInt(o1.getName()), Integer.parseInt(o2.getName()));
                        } catch (NumberFormatException e) {
                            log.error(e, "Couldn't compare as numbers? [%s][%s]", o1, o2);
                            return o1.compareTo(o2);
                        }
                    }
                });

                try {
                    List<FireHydrant> hydrants = Lists.newArrayList();
                    for (File segmentDir : sinkFiles) {
                        log.info("Loading previously persisted segment at [%s]", segmentDir);

                        // The FilenameFilter above already excludes non-numeric names, but
                        // double-check here so the "merged" dir is never added as a hydrant.
                        // If that filter is guaranteed sufficient, this check can be removed.
                        if (Ints.tryParse(segmentDir.getName()) == null) {
                            continue;
                        }

                        hydrants.add(
                                new FireHydrant(new QueryableIndexSegment(null, IndexIO.loadIndex(segmentDir)),
                                        Integer.parseInt(segmentDir.getName())));
                    }

                    Sink currSink = new Sink(sinkInterval, schema, versioningPolicy.getVersion(sinkInterval),
                            hydrants);
                    sinks.put(sinkInterval.getStartMillis(), currSink);
                    sinkTimeline.add(currSink.getInterval(), currSink.getVersion(),
                            new SingleElementPartitionChunk<Sink>(currSink));

                    segmentAnnouncer.announceSegment(currSink.getSegment());
                } catch (IOException e) {
                    log.makeAlert(e, "Problem loading sink[%s] from disk.", schema.getDataSource())
                            .addData("interval", sinkInterval).emit();
                }
            }
        }

        private void registerServerViewCallback() {
            serverView.registerSegmentCallback(persistExecutor, new ServerView.BaseSegmentCallback() {
                @Override
                public ServerView.CallbackAction segmentAdded(DruidServer server, DataSegment segment) {
                    if (stopped) {
                        log.info("Unregistering ServerViewCallback");
                        persistExecutor.shutdown();
                        return ServerView.CallbackAction.UNREGISTER;
                    }

                    if ("realtime".equals(server.getType())) {
                        return ServerView.CallbackAction.CONTINUE;
                    }

                    log.debug("Checking segment[%s] on server[%s]", segment, server);
                    if (schema.getDataSource().equals(segment.getDataSource())) {
                        final Interval interval = segment.getInterval();
                        for (Map.Entry<Long, Sink> entry : sinks.entrySet()) {
                            final Long sinkKey = entry.getKey();
                            if (interval.contains(sinkKey)) {
                                final Sink sink = entry.getValue();
                                log.info("Segment[%s] matches sink[%s] on server[%s]", segment, sink, server);

                                final String segmentVersion = segment.getVersion();
                                final String sinkVersion = sink.getSegment().getVersion();
                                if (segmentVersion.compareTo(sinkVersion) >= 0) {
                                    log.info("Segment version[%s] >= sink version[%s]", segmentVersion,
                                            sinkVersion);
                                    try {
                                        segmentAnnouncer.unannounceSegment(sink.getSegment());
                                        FileUtils
                                                .deleteDirectory(computePersistDir(schema, sink.getInterval()));
                                        log.info("Removing sinkKey %d for segment %s", sinkKey,
                                                sink.getSegment().getIdentifier());
                                        sinks.remove(sinkKey);
                                        sinkTimeline.remove(sink.getInterval(), sink.getVersion(),
                                                new SingleElementPartitionChunk<Sink>(sink));

                                        synchronized (handoffCondition) {
                                            handoffCondition.notifyAll();
                                        }
                                    } catch (IOException e) {
                                        log.makeAlert(e, "Unable to delete old segment for dataSource[%s].",
                                                schema.getDataSource()).addData("interval", sink.getInterval())
                                                .emit();
                                    }
                                }
                            }
                        }
                    }

                    return ServerView.CallbackAction.CONTINUE;
                }
            });
        }

        private void startPersistThread() {
            final long truncatedNow = segmentGranularity.truncate(new DateTime()).getMillis();
            final long windowMillis = windowPeriod.toStandardDuration().getMillis();

            log.info("Expect to run at [%s]", new DateTime().plus(new Duration(System.currentTimeMillis(),
                    segmentGranularity.increment(truncatedNow) + windowMillis)));

            ScheduledExecutors.scheduleAtFixedRate(scheduledExecutor,
                    new Duration(System.currentTimeMillis(),
                            segmentGranularity.increment(truncatedNow) + windowMillis),
                    new Duration(truncatedNow, segmentGranularity.increment(truncatedNow)),
                    new ThreadRenamingCallable<ScheduledExecutors.Signal>(String.format("%s-overseer-%d",
                            schema.getDataSource(), schema.getShardSpec().getPartitionNum())) {
                        @Override
                        public ScheduledExecutors.Signal doCall() {
                            if (stopped) {
                                log.info("Stopping merge-n-push overseer thread");
                                return ScheduledExecutors.Signal.STOP;
                            }

                            log.info("Starting merge and push.");

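                            // Push any sink whose interval start falls before the window-adjusted watermark.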
                            long minTimestamp = segmentGranularity
                                    .truncate(rejectionPolicy.getCurrMaxTime().minus(windowMillis)).getMillis();

                            List<Map.Entry<Long, Sink>> sinksToPush = Lists.newArrayList();
                            for (Map.Entry<Long, Sink> entry : sinks.entrySet()) {
                                final Long intervalStart = entry.getKey();
                                if (intervalStart < minTimestamp) {
                                    log.info("Adding entry[%s] for merge and push.", entry);
                                    sinksToPush.add(entry);
                                }
                            }

                            for (final Map.Entry<Long, Sink> entry : sinksToPush) {
                                persistAndMerge(entry.getKey(), entry.getValue());
                            }

                            if (stopped) {
                                log.info("Stopping merge-n-push overseer thread");
                                return ScheduledExecutors.Signal.STOP;
                            } else {
                                return ScheduledExecutors.Signal.REPEAT;
                            }
                        }
                    });
        }
    };
}

From source file:com.metamx.druid.realtime.plumber.Sink.java

License:Open Source License

public Sink(Interval interval, Schema schema, String version) {
    this.schema = schema;
    this.interval = interval;
    this.version = version;

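    // Seed the sink's first in-memory index at the interval's inclusive start.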
    makeNewCurrIndex(interval.getStartMillis(), schema);
}

From source file:com.metamx.druid.realtime.plumber.Sink.java

License:Open Source License

public Sink(Interval interval, Schema schema, String version, List<FireHydrant> hydrants) {
    this.schema = schema;
    this.interval = interval;
    this.version = version;

    for (int i = 0; i < hydrants.size(); ++i) {
        final FireHydrant hydrant = hydrants.get(i);
        if (hydrant.getCount() != i) {
            throw new ISE("hydrant[%s] not the right count[%s]", hydrant, i);
        }
    }
    this.hydrants.addAll(hydrants);

    makeNewCurrIndex(interval.getStartMillis(), schema);
}

From source file:com.metamx.druid.realtime.RealtimePlumberSchool.java

License:Open Source License

@Override
public Plumber findPlumber(final Schema schema, final FireDepartmentMetrics metrics) {
    verifyState();
    initializeExecutors();

    computeBaseDir(schema).mkdirs();

    final Map<Long, Sink> sinks = Maps.newConcurrentMap();

    for (File sinkDir : computeBaseDir(schema).listFiles()) {
        Interval sinkInterval = new Interval(sinkDir.getName().replace("_", "/"));

        final File[] sinkFiles = sinkDir.listFiles();
        Arrays.sort(sinkFiles, new Comparator<File>() {
            @Override
            public int compare(File o1, File o2) {
                try {
                    return Ints.compare(Integer.parseInt(o1.getName()), Integer.parseInt(o2.getName()));
                } catch (NumberFormatException e) {
                    log.error(e, "Couldn't compare as numbers? [%s][%s]", o1, o2);
                    return o1.compareTo(o2);
                }
            }
        });

        try {
            List<FireHydrant> hydrants = Lists.newArrayList();
            for (File segmentDir : sinkFiles) {
                log.info("Loading previously persisted segment at [%s]", segmentDir);
                hydrants.add(new FireHydrant(new QueryableIndexSegment(null, IndexIO.loadIndex(segmentDir)),
                        Integer.parseInt(segmentDir.getName())));
            }

            Sink currSink = new Sink(sinkInterval, schema, hydrants);
            sinks.put(sinkInterval.getStartMillis(), currSink);

            metadataUpdater.announceSegment(currSink.getSegment());
        } catch (IOException e) {
            log.makeAlert(e, "Problem loading sink[%s] from disk.", schema.getDataSource())
                    .addData("interval", sinkInterval).emit();
        }
    }

    serverView.registerSegmentCallback(persistExecutor, new ServerView.BaseSegmentCallback() {
        @Override
        public ServerView.CallbackAction segmentAdded(DruidServer server, DataSegment segment) {
            if ("realtime".equals(server.getType())) {
                return ServerView.CallbackAction.CONTINUE;
            }

            log.debug("Checking segment[%s] on server[%s]", segment, server);
            if (schema.getDataSource().equals(segment.getDataSource())) {
                final Interval interval = segment.getInterval();
                for (Map.Entry<Long, Sink> entry : sinks.entrySet()) {
                    final Long sinkKey = entry.getKey();
                    if (interval.contains(sinkKey)) {
                        final Sink sink = entry.getValue();
                        log.info("Segment matches sink[%s]", sink);

                        if (segment.getVersion().compareTo(sink.getSegment().getVersion()) >= 0) {
                            try {
                                metadataUpdater.unannounceSegment(sink.getSegment());
                                FileUtils.deleteDirectory(computePersistDir(schema, sink.getInterval()));
                                sinks.remove(sinkKey);
                            } catch (IOException e) {
                                log.makeAlert(e, "Unable to delete old segment for dataSource[%s].",
                                        schema.getDataSource()).addData("interval", sink.getInterval()).emit();
                            }
                        }
                    }
                }
            }

            return ServerView.CallbackAction.CONTINUE;
        }
    });

    final long truncatedNow = segmentGranularity.truncate(new DateTime()).getMillis();
    final long windowMillis = windowPeriod.toStandardDuration().getMillis();
    final RejectionPolicy rejectionPolicy = rejectionPolicyFactory.create(windowPeriod);
    log.info("Creating plumber using rejectionPolicy[%s]", rejectionPolicy);

    log.info("Expect to run at [%s]", new DateTime().plus(new Duration(System.currentTimeMillis(),
            segmentGranularity.increment(truncatedNow) + windowMillis)));

    ScheduledExecutors.scheduleAtFixedRate(scheduledExecutor,
            new Duration(System.currentTimeMillis(), segmentGranularity.increment(truncatedNow) + windowMillis),
            new Duration(truncatedNow, segmentGranularity.increment(truncatedNow)),
            new ThreadRenamingRunnable(String.format("%s-overseer", schema.getDataSource())) {
                @Override
                public void doRun() {
                    log.info("Starting merge and push.");

                    long minTimestamp = segmentGranularity.truncate(rejectionPolicy.getCurrMaxTime())
                            .getMillis() - windowMillis;

                    List<Map.Entry<Long, Sink>> sinksToPush = Lists.newArrayList();
                    for (Map.Entry<Long, Sink> entry : sinks.entrySet()) {
                        final Long intervalStart = entry.getKey();
                        if (intervalStart < minTimestamp) {
                            log.info("Adding entry[%s] for merge and push.", entry);
                            sinksToPush.add(entry);
                        }
                    }

                    for (final Map.Entry<Long, Sink> entry : sinksToPush) {
                        final Sink sink = entry.getValue();

                        final String threadName = String.format("%s-%s-persist-n-merge", schema.getDataSource(),
                                new DateTime(entry.getKey()));
                        persistExecutor.execute(new ThreadRenamingRunnable(threadName) {
                            @Override
                            public void doRun() {
                                final Interval interval = sink.getInterval();

                                for (FireHydrant hydrant : sink) {
                                    if (!hydrant.hasSwapped()) {
                                        log.info("Hydrant[%s] hasn't swapped yet, swapping. Sink[%s]", hydrant,
                                                sink);
                                        final int rowCount = persistHydrant(hydrant, schema, interval);
                                        metrics.incrementRowOutputCount(rowCount);
                                    }
                                }

                                final File mergedFile;
                                try {
                                    List<QueryableIndex> indexes = Lists.newArrayList();
                                    for (FireHydrant fireHydrant : sink) {
                                        Segment segment = fireHydrant.getSegment();
                                        final QueryableIndex queryableIndex = segment.asQueryableIndex();
                                        log.info("Adding hydrant[%s]", fireHydrant);
                                        indexes.add(queryableIndex);
                                    }

                                    mergedFile = IndexMerger.mergeQueryableIndex(indexes,
                                            schema.getAggregators(),
                                            new File(computePersistDir(schema, interval), "merged"));

                                    QueryableIndex index = IndexIO.loadIndex(mergedFile);

                                    DataSegment segment = segmentPusher.push(mergedFile,
                                            sink.getSegment().withDimensions(
                                                    Lists.newArrayList(index.getAvailableDimensions())));

                                    metadataUpdater.publishSegment(segment);
                                } catch (IOException e) {
                                    log.makeAlert(e, "Failed to persist merged index[%s]",
                                            schema.getDataSource()).addData("interval", interval).emit();
                                }
                            }
                        });
                    }
                }
            });

    return new Plumber() {
        @Override
        public Sink getSink(long timestamp) {
            if (!rejectionPolicy.accept(timestamp)) {
                return null;
            }

            final long truncatedTime = segmentGranularity.truncate(timestamp);

            Sink retVal = sinks.get(truncatedTime);

            if (retVal == null) {
                retVal = new Sink(new Interval(new DateTime(truncatedTime),
                        segmentGranularity.increment(new DateTime(truncatedTime))), schema);

                try {
                    metadataUpdater.announceSegment(retVal.getSegment());

                    sinks.put(truncatedTime, retVal);
                } catch (IOException e) {
                    log.makeAlert(e, "Failed to announce new segment[%s]", schema.getDataSource())
                            .addData("interval", retVal.getInterval()).emit();
                }
            }

            return retVal;
        }

        @Override
        public <T> QueryRunner<T> getQueryRunner(final Query<T> query) {
            final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
            final Function<Query<T>, ServiceMetricEvent.Builder> builderFn = new Function<Query<T>, ServiceMetricEvent.Builder>() {
                private final QueryToolChest<T, Query<T>> toolchest = factory.getToolchest();

                @Override
                public ServiceMetricEvent.Builder apply(@Nullable Query<T> input) {
                    return toolchest.makeMetricBuilder(query);
                }
            };

            return factory.mergeRunners(EXEC,
                    FunctionalIterable.create(sinks.values()).transform(new Function<Sink, QueryRunner<T>>() {
                        @Override
                        public QueryRunner<T> apply(@Nullable Sink input) {
                            return new MetricsEmittingQueryRunner<T>(emitter, builderFn, factory.mergeRunners(
                                    EXEC,
                                    Iterables.transform(input, new Function<FireHydrant, QueryRunner<T>>() {
                                        @Override
                                        public QueryRunner<T> apply(@Nullable FireHydrant input) {
                                            return factory.createRunner(input.getSegment());
                                        }
                                    })));
                        }
                    }));
        }

        @Override
        public void persist(final Runnable commitRunnable) {
            final List<Pair<FireHydrant, Interval>> indexesToPersist = Lists.newArrayList();
            for (Sink sink : sinks.values()) {
                if (sink.swappable()) {
                    indexesToPersist.add(Pair.of(sink.swap(), sink.getInterval()));
                }
            }

            log.info("Submitting persist runnable for dataSource[%s]", schema.getDataSource());

            persistExecutor.execute(new ThreadRenamingRunnable(
                    String.format("%s-incremental-persist", schema.getDataSource())) {
                @Override
                public void doRun() {
                    for (Pair<FireHydrant, Interval> pair : indexesToPersist) {
                        metrics.incrementRowOutputCount(persistHydrant(pair.lhs, schema, pair.rhs));
                    }
                    commitRunnable.run();
                }
            });
        }

        @Override
        public void finishJob() {
            throw new UnsupportedOperationException();
        }
    };
}

From source file:com.metamx.druid.realtime.Sink.java

License:Open Source License

public Sink(Interval interval, Schema schema) {
    this.schema = schema;
    this.interval = interval;

    makeNewCurrIndex(interval.getStartMillis(), schema);
}