Example usage for org.joda.time Interval getEnd

List of usage examples for org.joda.time Interval getEnd

Introduction

On this page you can find example usage for org.joda.time Interval getEnd.

Prototype

public DateTime getEnd() 

Source Link

Document

Gets the end of this time interval, which is exclusive, as a DateTime.

Usage

From source file:io.druid.metadata.IndexerSQLMetadataStorageCoordinator.java

License: Apache License

/**
 * Returns all used (live) segments for {@code dataSource} whose intervals overlap
 * {@code interval}.
 *
 * <p>The overlap test is done in SQL: a segment [start, end] overlaps the requested
 * interval iff {@code start <= :end AND end >= :start}. Matching payloads are
 * deserialized and assembled into a {@link VersionedIntervalTimeline} so that only
 * the visible (highest-version, fully-covering) chunks are returned.
 *
 * @param dataSource the data source to look up
 * @param interval   the interval to overlap against
 * @return the visible used segments overlapping the interval
 * @throws IOException if a payload cannot be deserialized
 */
public List<DataSegment> getUsedSegmentsForInterval(final String dataSource, final Interval interval)
        throws IOException {
    final VersionedIntervalTimeline<String, DataSegment> timeline = connector.getDBI()
            .withHandle(new HandleCallback<VersionedIntervalTimeline<String, DataSegment>>() {
                @Override
                public VersionedIntervalTimeline<String, DataSegment> withHandle(Handle handle)
                        throws IOException {
                    final VersionedIntervalTimeline<String, DataSegment> timeline = new VersionedIntervalTimeline<String, DataSegment>(
                            Ordering.natural());

                    // NOTE: the original query repeated "AND used = true" twice; the
                    // duplicate predicate was redundant and has been removed.
                    final ResultIterator<byte[]> dbSegments = handle.createQuery(String.format(
                            "SELECT payload FROM %s WHERE used = true AND dataSource = :dataSource AND start <= :end and \"end\" >= :start",
                            dbTables.getSegmentsTable())).bind("dataSource", dataSource)
                            .bind("start", interval.getStart().toString())
                            .bind("end", interval.getEnd().toString()).map(ByteArrayMapper.FIRST).iterator();

                    // Close the iterator in a finally block so the underlying statement
                    // and result set are released even if deserialization fails midway.
                    try {
                        while (dbSegments.hasNext()) {
                            final byte[] payload = dbSegments.next();

                            final DataSegment segment = jsonMapper.readValue(payload, DataSegment.class);

                            timeline.add(segment.getInterval(), segment.getVersion(),
                                    segment.getShardSpec().createChunk(segment));
                        }
                    } finally {
                        dbSegments.close();
                    }

                    return timeline;
                }
            });

    // Flatten the timeline lookup into a plain list of segments.
    return Lists.newArrayList(Iterables.concat(Iterables.transform(timeline.lookup(interval),
            new Function<TimelineObjectHolder<String, DataSegment>, Iterable<DataSegment>>() {
                @Override
                public Iterable<DataSegment> apply(TimelineObjectHolder<String, DataSegment> input) {
                    return input.getObject().payloads();
                }
            })));
}

From source file:io.druid.metadata.IndexerSQLMetadataStorageCoordinator.java

License: Apache License

/**
 * Returns every unused segment for {@code dataSource} whose own interval lies fully
 * inside {@code interval} (start >= :start and end <= :end).
 *
 * @param dataSource the data source to look up
 * @param interval   the containing interval
 * @return all unused segments contained in the interval
 */
public List<DataSegment> getUnusedSegmentsForInterval(final String dataSource, final Interval interval) {
    final List<DataSegment> matchingSegments = connector.getDBI()
            .withHandle(new HandleCallback<List<DataSegment>>() {
                @Override
                public List<DataSegment> withHandle(Handle handle) throws IOException, SQLException {
                    final String sql = String.format(
                            "SELECT payload FROM %s WHERE dataSource = :dataSource and start >= :start and \"end\" <= :end and used = false",
                            dbTables.getSegmentsTable());

                    // Deserializes each payload row and appends it to the accumulator list.
                    final Folder3<List<DataSegment>, byte[]> collectSegments = new Folder3<List<DataSegment>, byte[]>() {
                        @Override
                        public List<DataSegment> fold(List<DataSegment> accumulator, byte[] payload,
                                FoldController foldController, StatementContext statementContext)
                                throws SQLException {
                            try {
                                accumulator.add(jsonMapper.readValue(payload, DataSegment.class));
                                return accumulator;
                            } catch (Exception e) {
                                throw Throwables.propagate(e);
                            }
                        }
                    };

                    return handle.createQuery(sql)
                            .bind("dataSource", dataSource)
                            .bind("start", interval.getStart().toString())
                            .bind("end", interval.getEnd().toString())
                            .map(ByteArrayMapper.FIRST)
                            .fold(Lists.<DataSegment>newArrayList(), collectSegments);
                }
            });

    log.info("Found %,d segments for %s for interval %s.", matchingSegments.size(), dataSource, interval);
    return matchingSegments;
}

From source file:io.druid.segment.incremental.IncrementalIndexStorageAdapter.java

License: Apache License

/**
 * Builds one Cursor per granularity bucket of the requested interval, backed by the
 * in-memory incremental index. The requested interval is first clipped to the data's
 * actual time range; an empty sequence is returned when there is no overlap or the
 * index holds no rows.
 */
@Override
public Sequence<Cursor> makeCursors(final Filter filter, final Interval interval, final QueryGranularity gran) {
    if (index.isEmpty()) {
        return Sequences.empty();
    }

    Interval actualIntervalTmp = interval;

    // Data interval spans min row time through the end of the granularity bucket
    // containing the max row time (gran.next of the truncated max).
    final Interval dataInterval = new Interval(getMinTime().getMillis(),
            gran.next(gran.truncate(getMaxTime().getMillis())));

    if (!actualIntervalTmp.overlaps(dataInterval)) {
        return Sequences.empty();
    }

    // Clip the requested interval to the data interval on both ends.
    if (actualIntervalTmp.getStart().isBefore(dataInterval.getStart())) {
        actualIntervalTmp = actualIntervalTmp.withStart(dataInterval.getStart());
    }
    if (actualIntervalTmp.getEnd().isAfter(dataInterval.getEnd())) {
        actualIntervalTmp = actualIntervalTmp.withEnd(dataInterval.getEnd());
    }

    final Interval actualInterval = actualIntervalTmp;

    // One bucket-start timestamp per granularity step; each maps to one Cursor.
    return Sequences.map(
            Sequences.simple(gran.iterable(actualInterval.getStartMillis(), actualInterval.getEndMillis())),
            new Function<Long, Cursor>() {
                EntryHolder currEntry = new EntryHolder();
                private final ValueMatcher filterMatcher;

                {
                    // Built once and shared by every cursor; it reads the current row
                    // through currEntry.
                    filterMatcher = makeFilterMatcher(filter, currEntry);
                }

                @Override
                public Cursor apply(@Nullable final Long input) {
                    // input is the bucket start; clamp to the clipped interval start.
                    final long timeStart = Math.max(input, actualInterval.getStartMillis());

                    return new Cursor() {
                        private Iterator<Map.Entry<IncrementalIndex.TimeAndDims, Integer>> baseIter;
                        private ConcurrentNavigableMap<IncrementalIndex.TimeAndDims, Integer> cursorMap;
                        final DateTime time;
                        // Rows consumed so far; used by reset() to re-skip them. -1 means
                        // "first reset", i.e. nothing to skip yet.
                        int numAdvanced = -1;
                        boolean done;

                        {
                            // Restrict the index to rows within this bucket (and within
                            // the clipped interval end).
                            cursorMap = index.getSubMap(
                                    new IncrementalIndex.TimeAndDims(timeStart, new String[][] {}),
                                    new IncrementalIndex.TimeAndDims(
                                            Math.min(actualInterval.getEndMillis(), gran.next(input)),
                                            new String[][] {}));
                            time = gran.toDateTime(input);

                            reset();
                        }

                        @Override
                        public DateTime getTime() {
                            return time;
                        }

                        @Override
                        public void advance() {
                            if (!baseIter.hasNext()) {
                                done = true;
                                return;
                            }

                            // Skip forward to the next row accepted by the filter.
                            while (baseIter.hasNext()) {
                                if (Thread.interrupted()) {
                                    throw new QueryInterruptedException();
                                }

                                currEntry.set(baseIter.next());

                                if (filterMatcher.matches()) {
                                    return;
                                }
                            }

                            // Iterator exhausted without a match.
                            if (!filterMatcher.matches()) {
                                done = true;
                            }
                        }

                        @Override
                        public void advanceTo(int offset) {
                            int count = 0;
                            while (count < offset && !isDone()) {
                                advance();
                                count++;
                            }
                        }

                        @Override
                        public boolean isDone() {
                            return done;
                        }

                        @Override
                        public void reset() {
                            baseIter = cursorMap.entrySet().iterator();

                            if (numAdvanced == -1) {
                                numAdvanced = 0;
                            } else {
                                // Re-skip the rows already consumed before the reset.
                                Iterators.advance(baseIter, numAdvanced);
                            }

                            if (Thread.interrupted()) {
                                throw new QueryInterruptedException();
                            }

                            // Position on the first filter-matching row.
                            boolean foundMatched = false;
                            while (baseIter.hasNext()) {
                                currEntry.set(baseIter.next());
                                if (filterMatcher.matches()) {
                                    foundMatched = true;
                                    break;
                                }

                                numAdvanced++;
                            }

                            done = !foundMatched && (cursorMap.size() == 0 || !baseIter.hasNext());
                        }

                        @Override
                        public DimensionSelector makeDimensionSelector(final String dimension,
                                @Nullable final ExtractionFn extractionFn) {
                            // The time column gets a dedicated selector.
                            if (dimension.equals(Column.TIME_COLUMN_NAME)) {
                                return new SingleScanTimeDimSelector(makeLongColumnSelector(dimension),
                                        extractionFn);
                            }

                            final IncrementalIndex.DimDim dimValLookup = index.getDimension(dimension);
                            if (dimValLookup == null) {
                                return NULL_DIMENSION_SELECTOR;
                            }

                            // Snapshot the cardinality now; ids >= maxId were added after
                            // this cursor was created and are ignored below.
                            final int maxId = dimValLookup.size();
                            final int dimIndex = index.getDimensionIndex(dimension);

                            return new DimensionSelector() {
                                @Override
                                public IndexedInts getRow() {
                                    final ArrayList<Integer> vals = Lists.newArrayList();
                                    if (dimIndex < currEntry.getKey().getDims().length) {
                                        final String[] dimVals = currEntry.getKey().getDims()[dimIndex];
                                        if (dimVals != null) {
                                            for (String dimVal : dimVals) {
                                                int id = dimValLookup.getId(dimVal);
                                                if (id < maxId) {
                                                    vals.add(id);
                                                }
                                            }
                                        }
                                    }
                                    // check for null entry
                                    if (vals.isEmpty() && dimValLookup.contains(null)) {
                                        int id = dimValLookup.getId(null);
                                        if (id < maxId) {
                                            vals.add(id);
                                        }
                                    }

                                    return new IndexedInts() {
                                        @Override
                                        public int size() {
                                            return vals.size();
                                        }

                                        @Override
                                        public int get(int index) {
                                            return vals.get(index);
                                        }

                                        @Override
                                        public Iterator<Integer> iterator() {
                                            return vals.iterator();
                                        }

                                        @Override
                                        public void fill(int index, int[] toFill) {
                                            throw new UnsupportedOperationException("fill not supported");
                                        }

                                        @Override
                                        public void close() throws IOException {

                                        }
                                    };
                                }

                                @Override
                                public int getValueCardinality() {
                                    return maxId;
                                }

                                @Override
                                public String lookupName(int id) {
                                    final String value = dimValLookup.getValue(id);
                                    return extractionFn == null ? value : extractionFn.apply(value);

                                }

                                @Override
                                public int lookupId(String name) {
                                    // Reverse lookup is undefined once values have been
                                    // transformed by an extraction function.
                                    if (extractionFn != null) {
                                        throw new UnsupportedOperationException(
                                                "cannot perform lookup when applying an extraction function");
                                    }
                                    return dimValLookup.getId(name);
                                }
                            };
                        }

                        @Override
                        public FloatColumnSelector makeFloatColumnSelector(String columnName) {
                            final Integer metricIndexInt = index.getMetricIndex(columnName);
                            // Unknown metric: constant 0.0f selector.
                            if (metricIndexInt == null) {
                                return new FloatColumnSelector() {
                                    @Override
                                    public float get() {
                                        return 0.0f;
                                    }
                                };
                            }

                            final int metricIndex = metricIndexInt;
                            return new FloatColumnSelector() {
                                @Override
                                public float get() {
                                    return index.getMetricFloatValue(currEntry.getValue(), metricIndex);
                                }
                            };
                        }

                        @Override
                        public LongColumnSelector makeLongColumnSelector(String columnName) {
                            // The time column reads the row timestamp directly.
                            if (columnName.equals(Column.TIME_COLUMN_NAME)) {
                                return new LongColumnSelector() {
                                    @Override
                                    public long get() {
                                        return currEntry.getKey().getTimestamp();
                                    }
                                };
                            }
                            final Integer metricIndexInt = index.getMetricIndex(columnName);
                            // Unknown metric: constant 0L selector.
                            if (metricIndexInt == null) {
                                return new LongColumnSelector() {
                                    @Override
                                    public long get() {
                                        return 0L;
                                    }
                                };
                            }

                            final int metricIndex = metricIndexInt;

                            return new LongColumnSelector() {
                                @Override
                                public long get() {
                                    return index.getMetricLongValue(currEntry.getValue(), metricIndex);
                                }
                            };
                        }

                        @Override
                        public ObjectColumnSelector makeObjectColumnSelector(String column) {
                            // Resolution order: time column, then metrics, then
                            // dimensions; null if the column is unknown.
                            if (column.equals(Column.TIME_COLUMN_NAME)) {
                                return new ObjectColumnSelector<Long>() {
                                    @Override
                                    public Class classOfObject() {
                                        return Long.TYPE;
                                    }

                                    @Override
                                    public Long get() {
                                        return currEntry.getKey().getTimestamp();
                                    }
                                };
                            }

                            final Integer metricIndexInt = index.getMetricIndex(column);
                            if (metricIndexInt != null) {
                                final int metricIndex = metricIndexInt;

                                final ComplexMetricSerde serde = ComplexMetrics
                                        .getSerdeForType(index.getMetricType(column));
                                return new ObjectColumnSelector() {
                                    @Override
                                    public Class classOfObject() {
                                        return serde.getObjectStrategy().getClazz();
                                    }

                                    @Override
                                    public Object get() {
                                        return index.getMetricObjectValue(currEntry.getValue(), metricIndex);
                                    }
                                };
                            }

                            final Integer dimensionIndexInt = index.getDimensionIndex(column);

                            if (dimensionIndexInt != null) {
                                final int dimensionIndex = dimensionIndexInt;
                                return new ObjectColumnSelector<Object>() {
                                    @Override
                                    public Class classOfObject() {
                                        return Object.class;
                                    }

                                    @Override
                                    public Object get() {
                                        IncrementalIndex.TimeAndDims key = currEntry.getKey();
                                        if (key == null) {
                                            return null;
                                        }

                                        String[][] dims = key.getDims();
                                        if (dimensionIndex >= dims.length) {
                                            return null;
                                        }

                                        // Single-valued dimensions unwrap to the bare
                                        // String; multi-valued return the array.
                                        final String[] dimVals = dims[dimensionIndex];
                                        if (dimVals == null || dimVals.length == 0) {
                                            return null;
                                        }
                                        if (dimVals.length == 1) {
                                            return dimVals[0];
                                        }
                                        return dimVals;
                                    }
                                };
                            }

                            return null;
                        }
                    };
                }
            });
}

From source file:io.druid.segment.IndexMerger.java

License: Apache License

/**
 * Writes the "index.drd" descriptor file into {@code inDir}: a version byte, the
 * dimension and metric name indexes, the data interval as "start/end", and the
 * serialized bitmap serde factory. Verifies the resulting file size afterwards.
 *
 * @throws IOException if writing or the size check fails
 */
public static void createIndexDrdFile(byte versionId, File inDir, GenericIndexed<String> availableDimensions,
        GenericIndexed<String> availableMetrics, Interval dataInterval, BitmapSerdeFactory bitmapSerdeFactory)
        throws IOException {
    final File indexFile = new File(inDir, "index.drd");

    try (FileChannel channel = new FileOutputStream(indexFile).getChannel()) {
        // Version byte comes first.
        final byte[] versionBytes = new byte[] { versionId };
        channel.write(ByteBuffer.wrap(versionBytes));

        availableDimensions.writeToChannel(channel);
        availableMetrics.writeToChannel(channel);

        // Interval is stored in ISO "start/end" form.
        final String intervalString = String.format("%s/%s", dataInterval.getStart(), dataInterval.getEnd());
        serializerUtils.writeString(channel, intervalString);
        serializerUtils.writeString(channel, mapper.writeValueAsString(bitmapSerdeFactory));
    }

    IndexIO.checkFileSize(indexFile);
}

From source file:io.druid.segment.QueryableIndexStorageAdapter.java

License: Apache License

/**
 * Builds cursors over the queryable index for the requested interval and granularity.
 * The interval is clipped to the data's actual time range; an empty sequence is
 * returned when there is no overlap. A filter, when present, is turned into a bitmap
 * offset so only matching rows are visited.
 */
@Override
public Sequence<Cursor> makeCursors(Filter filter, Interval interval, QueryGranularity gran) {
    final long minDataTimestamp = getMinTime().getMillis();
    final long maxDataTimestamp = getMaxTime().getMillis();
    // Data interval runs through the end of the granularity bucket holding the max time.
    final Interval dataInterval = new Interval(minDataTimestamp, gran.next(gran.truncate(maxDataTimestamp)));

    if (!interval.overlaps(dataInterval)) {
        return Sequences.empty();
    }

    // Clip the requested interval to the data interval on both ends.
    Interval actualInterval = interval;
    if (actualInterval.getStart().isBefore(dataInterval.getStart())) {
        actualInterval = actualInterval.withStart(dataInterval.getStart());
    }
    if (actualInterval.getEnd().isAfter(dataInterval.getEnd())) {
        actualInterval = actualInterval.withEnd(dataInterval.getEnd());
    }

    // No filter: scan every row; otherwise resolve the filter to a bitmap of row offsets.
    final Offset offset;
    if (filter == null) {
        offset = new NoFilterOffset(0, index.getNumRows());
    } else {
        final ColumnSelectorBitmapIndexSelector selector = new ColumnSelectorBitmapIndexSelector(
                index.getBitmapFactoryForDimensions(), index);

        offset = new BitmapOffset(selector.getBitmapFactory(), filter.getBitmapIndex(selector));
    }

    final Sequence<Cursor> cursors = new CursorSequenceBuilder(index, actualInterval, gran, offset,
            maxDataTimestamp).build();
    return Sequences.filter(cursors, Predicates.<Cursor>notNull());
}

From source file:io.druid.segment.realtime.appenderator.SegmentIdentifier.java

License: Apache License

/**
 * JSON-deserializable identifier for a segment: data source, interval, version and
 * shard spec. All arguments are required; the canonical string form is precomputed
 * from the interval's start and (exclusive) end.
 */
@JsonCreator
public SegmentIdentifier(@JsonProperty("dataSource") String dataSource,
        @JsonProperty("interval") Interval interval, @JsonProperty("version") String version,
        @JsonProperty("shardSpec") ShardSpec shardSpec) {
    this.dataSource = Preconditions.checkNotNull(dataSource, "dataSource");
    this.interval = Preconditions.checkNotNull(interval, "interval");
    this.version = Preconditions.checkNotNull(version, "version");
    this.shardSpec = Preconditions.checkNotNull(shardSpec, "shardSpec");
    // Cache the canonical identifier string once, since it is immutable.
    this.asString = DataSegment.makeDataSegmentIdentifier(dataSource, interval.getStart(), interval.getEnd(),
            version, shardSpec);
}

From source file:io.druid.segment.realtime.plumber.RealtimePlumber.java

License: Apache License

/**
 * Restores sinks from previously persisted segment data on disk, registering each
 * recovered sink with the in-memory map, the sink timeline and the segment announcer.
 *
 * <p>Corrupted segment directories are moved aside (copied to a dump dir and deleted)
 * and skipped, which may drop some data. Directories with no intermediate segments
 * are skipped with a warning.
 *
 * @return the commit metadata with the latest commit timestamp found across all
 *         persisted segments, or {@code null} if none (or no base dir) exists
 */
protected Object bootstrapSinksFromDisk() {
    final VersioningPolicy versioningPolicy = config.getVersioningPolicy();

    File baseDir = computeBaseDir(schema);
    if (baseDir == null || !baseDir.exists()) {
        return null;
    }

    File[] files = baseDir.listFiles();
    if (files == null) {
        return null;
    }

    Object metadata = null;
    long latestCommitTime = 0;
    for (File sinkDir : files) {
        // Sink dirs are named "<start>_<end>"; convert back to an ISO-8601 interval.
        Interval sinkInterval = new Interval(sinkDir.getName().replace("_", "/"));

        //final File[] sinkFiles = sinkDir.listFiles();
        // To avoid reading and listing of "merged" dir
        final File[] sinkFiles = sinkDir.listFiles(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String fileName) {
                return !(Ints.tryParse(fileName) == null);
            }
        });
        if (sinkFiles == null) {
            // listFiles() returns null when sinkDir is not a directory or an I/O error
            // occurs while listing; the original code would NPE in Arrays.sort here.
            log.warn("Unable to list contents of %s, skipping.", sinkDir.getAbsolutePath());
            continue;
        }
        // Hydrant dirs are numeric; load them in ascending order.
        Arrays.sort(sinkFiles, new Comparator<File>() {
            @Override
            public int compare(File o1, File o2) {
                try {
                    return Ints.compare(Integer.parseInt(o1.getName()), Integer.parseInt(o2.getName()));
                } catch (NumberFormatException e) {
                    log.error(e, "Couldn't compare as numbers? [%s][%s]", o1, o2);
                    return o1.compareTo(o2);
                }
            }
        });
        boolean isCorrupted = false;
        try {
            List<FireHydrant> hydrants = Lists.newArrayList();
            for (File segmentDir : sinkFiles) {
                log.info("Loading previously persisted segment at [%s]", segmentDir);

                // Although this has been tackled at start of this method.
                // Just a doubly-check added to skip "merged" dir. from being added to hydrants
                // If 100% sure that this is not needed, this check can be removed.
                if (Ints.tryParse(segmentDir.getName()) == null) {
                    continue;
                }
                QueryableIndex queryableIndex = null;
                try {
                    queryableIndex = IndexIO.loadIndex(segmentDir);
                } catch (IOException e) {
                    log.error(e, "Problem loading segmentDir from disk.");
                    isCorrupted = true;
                }
                if (isCorrupted) {
                    // Move the corrupt segment out of the way so it is not retried.
                    try {
                        File corruptSegmentDir = computeCorruptedFileDumpDir(segmentDir, schema);
                        log.info("Renaming %s to %s", segmentDir.getAbsolutePath(),
                                corruptSegmentDir.getAbsolutePath());
                        FileUtils.copyDirectory(segmentDir, corruptSegmentDir);
                        FileUtils.deleteDirectory(segmentDir);
                    } catch (Exception e1) {
                        log.error(e1, "Failed to rename %s", segmentDir.getAbsolutePath());
                    }
                    //Note: skipping corrupted segment might lead to dropping some data. This strategy should be changed
                    //at some point.
                    continue;
                }
                // Track the commit metadata with the most recent timestamp seen so far.
                Map<String, Object> segmentMetadata = queryableIndex.getMetaData();
                if (segmentMetadata != null) {
                    Object timestampObj = segmentMetadata.get(COMMIT_METADATA_TIMESTAMP_KEY);
                    if (timestampObj != null) {
                        long timestamp = ((Long) timestampObj).longValue();
                        if (timestamp > latestCommitTime) {
                            log.info(
                                    "Found metaData [%s] with latestCommitTime [%s] greater than previous recorded [%s]",
                                    queryableIndex.getMetaData(), timestamp, latestCommitTime);
                            latestCommitTime = timestamp;
                            metadata = queryableIndex.getMetaData().get(COMMIT_METADATA_KEY);
                        }
                    }
                }
                hydrants.add(
                        new FireHydrant(new QueryableIndexSegment(
                                DataSegment.makeDataSegmentIdentifier(schema.getDataSource(),
                                        sinkInterval.getStart(), sinkInterval.getEnd(),
                                        versioningPolicy.getVersion(sinkInterval), config.getShardSpec()),
                                queryableIndex), Integer.parseInt(segmentDir.getName())));
            }
            if (hydrants.isEmpty()) {
                // Probably encountered a corrupt sink directory
                log.warn(
                        "Found persisted segment directory with no intermediate segments present at %s, skipping sink creation.",
                        sinkDir.getAbsolutePath());
                continue;
            }
            Sink currSink = new Sink(sinkInterval, schema, config, versioningPolicy.getVersion(sinkInterval),
                    hydrants);
            sinks.put(sinkInterval.getStartMillis(), currSink);
            sinkTimeline.add(currSink.getInterval(), currSink.getVersion(),
                    new SingleElementPartitionChunk<Sink>(currSink));

            segmentAnnouncer.announceSegment(currSink.getSegment());
        } catch (IOException e) {
            log.makeAlert(e, "Problem loading sink[%s] from disk.", schema.getDataSource())
                    .addData("interval", sinkInterval).emit();
        }
    }
    return metadata;
}

From source file:io.druid.segment.SegmentDesc.java

License: Apache License

/**
 * Rewrites a segment identifier of the form
 * {@code dataSource_start_end_version[_partition...]} so that the start/end fields
 * reflect {@code newInterval}, leaving all other fields untouched.
 *
 * @param identifier  the original segment identifier
 * @param newInterval the interval to substitute into the identifier
 * @return the rewritten identifier, or the input unchanged if it does not have at
 *         least four delimiter-separated fields
 */
public static String withInterval(final String identifier, Interval newInterval) {
    String[] parts = identifier.split(DataSegment.delimiter);
    if (parts.length < 4) {
        // happens for test segments which has invalid segment id.. ignore for now
        LOGGER.warn("Invalid segment identifier " + identifier);
        return identifier;
    }
    // Rebuild: data source, then the new start/end, then every remaining field.
    StringBuilder result = new StringBuilder(parts[0]);
    result.append(DataSegment.delimiter).append(newInterval.getStart());
    result.append(DataSegment.delimiter).append(newInterval.getEnd());
    for (int i = 3; i < parts.length; i++) {
        result.append(DataSegment.delimiter).append(parts[i]);
    }
    return result.toString();
}

From source file:io.druid.server.audit.SQLAuditManager.java

License: Apache License

/**
 * Fetches the audit history for a specific audit key and type, limited to entries
 * created within the given interval (or a default interval when {@code interval} is
 * null, per {@code getIntervalOrDefault}), ordered by creation date.
 */
@Override
public List<AuditEntry> fetchAuditHistory(final String key, final String type, Interval interval) {
    final Interval theInterval = getIntervalOrDefault(interval);
    return dbi.withHandle(new HandleCallback<List<AuditEntry>>() {
        @Override
        public List<AuditEntry> withHandle(Handle handle) throws Exception {
            // Deserializes each payload column into an AuditEntry.
            final ResultSetMapper<AuditEntry> payloadMapper = new ResultSetMapper<AuditEntry>() {
                @Override
                public AuditEntry map(int index, ResultSet r, StatementContext ctx) throws SQLException {
                    try {
                        return jsonMapper.readValue(r.getBytes("payload"), AuditEntry.class);
                    } catch (IOException e) {
                        throw new SQLException(e);
                    }
                }
            };

            final String sql = String.format(
                    "SELECT payload FROM %s WHERE audit_key = :audit_key and type = :type and created_date between :start_date and :end_date ORDER BY created_date",
                    getAuditTable());

            return handle.createQuery(sql)
                    .bind("audit_key", key)
                    .bind("type", type)
                    .bind("start_date", theInterval.getStart().toString())
                    .bind("end_date", theInterval.getEnd().toString())
                    .map(payloadMapper)
                    .list();
        }
    });
}

From source file:io.druid.server.audit.SQLAuditManager.java

License: Apache License

/**
 * Fetches the audit history for every audit key of the given type, limited to entries
 * created within the given interval (or a default interval when {@code interval} is
 * null, per {@code getIntervalOrDefault}), ordered by creation date.
 */
@Override
public List<AuditEntry> fetchAuditHistory(final String type, Interval interval) {
    final Interval theInterval = getIntervalOrDefault(interval);
    return dbi.withHandle(new HandleCallback<List<AuditEntry>>() {
        @Override
        public List<AuditEntry> withHandle(Handle handle) throws Exception {
            // Deserializes each payload column into an AuditEntry.
            final ResultSetMapper<AuditEntry> payloadMapper = new ResultSetMapper<AuditEntry>() {
                @Override
                public AuditEntry map(int index, ResultSet r, StatementContext ctx) throws SQLException {
                    try {
                        return jsonMapper.readValue(r.getBytes("payload"), AuditEntry.class);
                    } catch (IOException e) {
                        throw new SQLException(e);
                    }
                }
            };

            final String sql = String.format(
                    "SELECT payload FROM %s WHERE type = :type and created_date between :start_date and :end_date ORDER BY created_date",
                    getAuditTable());

            return handle.createQuery(sql)
                    .bind("type", type)
                    .bind("start_date", theInterval.getStart().toString())
                    .bind("end_date", theInterval.getEnd().toString())
                    .map(payloadMapper)
                    .list();
        }
    });
}