Example usage for org.joda.time Interval Interval

Introduction

This page collects usage examples for the org.joda.time Interval constructor Interval(Object).

Prototype

public Interval(Object interval) 

Document

Constructs a time interval by converting or copying from another object.
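
For orientation, here is a minimal, self-contained sketch of this converting constructor. The class name and the interval values are illustrative only and are not taken from the projects listed under Usage; assuming a standard Joda-Time dependency, the constructor accepts an ISO-8601 "start/end" string as well as another interval object.

import org.joda.time.Interval;

public class IntervalConstructorSketch {
    public static void main(String[] args) {
        // Parse an ISO-8601 interval string ("start/end"), the same form the
        // Druid examples below hand to the constructor.
        Interval fromString = new Interval("2015-01-01T00:00:00Z/2015-02-01T00:00:00Z");

        // Copy from another interval-like object, here an existing Interval.
        Interval copied = new Interval(fromString);

        System.out.println(fromString.getStart() + " / " + fromString.getEnd());
        System.out.println(copied.equals(fromString)); // true
    }
}

The examples that follow rely on exactly this behaviour: interval strings arrive as query parameters, JSON values, or underscore-encoded directory names and are passed straight to new Interval(...).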

Usage

From source file: io.druid.query.spec.LegacySegmentSpec.java

License: Apache License

private static List<Interval> convertValue(Object intervals) {
    final List<?> intervalStringList;
    if (intervals instanceof String) {
        intervalStringList = Arrays.asList((((String) intervals).split(",")));
    } else if (intervals instanceof Interval) {
        intervalStringList = Arrays.asList(intervals.toString());
    } else if (intervals instanceof Map) {
        intervalStringList = (List) ((Map) intervals).get("intervals");
    } else if (intervals instanceof List) {
        intervalStringList = (List) intervals;
    } else {
        throw new IAE("Unknown type[%s] for intervals[%s]", intervals.getClass(), intervals);
    }

    return Lists.transform(intervalStringList, new Function<Object, Interval>() {
        @Override
        public Interval apply(Object input) {
            return new Interval(input);
        }
    });
}

From source file: io.druid.segment.realtime.plumber.RealtimePlumber.java

License: Apache License

protected Object bootstrapSinksFromDisk() {
    final VersioningPolicy versioningPolicy = config.getVersioningPolicy();

    File baseDir = computeBaseDir(schema);
    if (baseDir == null || !baseDir.exists()) {
        return null;
    }

    File[] files = baseDir.listFiles();
    if (files == null) {
        return null;
    }

    Object metadata = null;
    long latestCommitTime = 0;
    for (File sinkDir : files) {
        Interval sinkInterval = new Interval(sinkDir.getName().replace("_", "/"));

        // Use a filter rather than a plain listFiles() to avoid reading and listing the "merged" dir.
        final File[] sinkFiles = sinkDir.listFiles(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String fileName) {
                return !(Ints.tryParse(fileName) == null);
            }
        });
        Arrays.sort(sinkFiles, new Comparator<File>() {
            @Override
            public int compare(File o1, File o2) {
                try {
                    return Ints.compare(Integer.parseInt(o1.getName()), Integer.parseInt(o2.getName()));
                } catch (NumberFormatException e) {
                    log.error(e, "Couldn't compare as numbers? [%s][%s]", o1, o2);
                    return o1.compareTo(o2);
                }
            }
        });
        boolean isCorrupted = false;
        try {
            List<FireHydrant> hydrants = Lists.newArrayList();
            for (File segmentDir : sinkFiles) {
                log.info("Loading previously persisted segment at [%s]", segmentDir);

                // The filter above already excludes it, but double-check that the "merged"
                // dir does not end up in the hydrants.
                // If this is guaranteed to be unnecessary, the check can be removed.
                if (Ints.tryParse(segmentDir.getName()) == null) {
                    continue;
                }
                QueryableIndex queryableIndex = null;
                try {
                    queryableIndex = IndexIO.loadIndex(segmentDir);
                } catch (IOException e) {
                    log.error(e, "Problem loading segmentDir from disk.");
                    isCorrupted = true;
                }
                if (isCorrupted) {
                    try {
                        File corruptSegmentDir = computeCorruptedFileDumpDir(segmentDir, schema);
                        log.info("Renaming %s to %s", segmentDir.getAbsolutePath(),
                                corruptSegmentDir.getAbsolutePath());
                        FileUtils.copyDirectory(segmentDir, corruptSegmentDir);
                        FileUtils.deleteDirectory(segmentDir);
                    } catch (Exception e1) {
                        log.error(e1, "Failed to rename %s", segmentDir.getAbsolutePath());
                    }
                    //Note: skipping corrupted segment might lead to dropping some data. This strategy should be changed
                    //at some point.
                    continue;
                }
                Map<String, Object> segmentMetadata = queryableIndex.getMetaData();
                if (segmentMetadata != null) {
                    Object timestampObj = segmentMetadata.get(COMMIT_METADATA_TIMESTAMP_KEY);
                    if (timestampObj != null) {
                        long timestamp = ((Long) timestampObj).longValue();
                        if (timestamp > latestCommitTime) {
                            log.info(
                                    "Found metaData [%s] with latestCommitTime [%s] greater than previous recorded [%s]",
                                    queryableIndex.getMetaData(), timestamp, latestCommitTime);
                            latestCommitTime = timestamp;
                            metadata = queryableIndex.getMetaData().get(COMMIT_METADATA_KEY);
                        }
                    }
                }
                hydrants.add(
                        new FireHydrant(new QueryableIndexSegment(
                                DataSegment.makeDataSegmentIdentifier(schema.getDataSource(),
                                        sinkInterval.getStart(), sinkInterval.getEnd(),
                                        versioningPolicy.getVersion(sinkInterval), config.getShardSpec()),
                                queryableIndex), Integer.parseInt(segmentDir.getName())));
            }
            if (hydrants.isEmpty()) {
                // Probably encountered a corrupt sink directory
                log.warn(
                        "Found persisted segment directory with no intermediate segments present at %s, skipping sink creation.",
                        sinkDir.getAbsolutePath());
                continue;
            }
            Sink currSink = new Sink(sinkInterval, schema, config, versioningPolicy.getVersion(sinkInterval),
                    hydrants);
            sinks.put(sinkInterval.getStartMillis(), currSink);
            sinkTimeline.add(currSink.getInterval(), currSink.getVersion(),
                    new SingleElementPartitionChunk<Sink>(currSink));

            segmentAnnouncer.announceSegment(currSink.getSegment());
        } catch (IOException e) {
            log.makeAlert(e, "Problem loading sink[%s] from disk.", schema.getDataSource())
                    .addData("interval", sinkInterval).emit();
        }
    }
    return metadata;
}

From source file: io.druid.segment.SchemalessIndex.java

License: Apache License

private static QueryableIndex makeAppendedMMappedIndex(Iterable<Pair<String, AggregatorFactory[]>> files,
        final List<Interval> intervals) {
    try {
        File tmpFile = File.createTempFile("yay", "boo");
        tmpFile.delete();
        File mergedFile = new File(tmpFile, "merged");
        mergedFile.mkdirs();
        mergedFile.deleteOnExit();

        List<File> filesToMap = makeFilesToMap(tmpFile, files);

        VersionedIntervalTimeline<Integer, File> timeline = new VersionedIntervalTimeline<Integer, File>(
                Ordering.natural().nullsFirst());

        ShardSpec noneShardSpec = new NoneShardSpec();

        for (int i = 0; i < intervals.size(); i++) {
            timeline.add(intervals.get(i), i, noneShardSpec.createChunk(filesToMap.get(i)));
        }

        final List<IndexableAdapter> adapters = Lists.newArrayList(Iterables.concat(
                // TimelineObjectHolder is actually an iterable of iterable of indexable adapters
                Iterables.transform(timeline.lookup(new Interval("1000-01-01/3000-01-01")),
                        new Function<TimelineObjectHolder<Integer, File>, Iterable<IndexableAdapter>>() {
                            @Override
                            public Iterable<IndexableAdapter> apply(
                                    final TimelineObjectHolder<Integer, File> timelineObjectHolder) {
                                return Iterables.transform(timelineObjectHolder.getObject(),

                                        // Each chunk can be used to build the actual IndexableAdapter
                                        new Function<PartitionChunk<File>, IndexableAdapter>() {
                                            @Override
                                            public IndexableAdapter apply(PartitionChunk<File> chunk) {
                                                try {
                                                    return new RowboatFilteringIndexAdapter(
                                                            new QueryableIndexIndexableAdapter(
                                                                    IndexIO.loadIndex(chunk.getObject())),
                                                            new Predicate<Rowboat>() {
                                                                @Override
                                                                public boolean apply(Rowboat input) {
                                                                    return timelineObjectHolder.getInterval()
                                                                            .contains(input.getTimestamp());
                                                                }
                                                            });
                                                } catch (IOException e) {
                                                    throw Throwables.propagate(e);
                                                }
                                            }
                                        });
                            }
                        })));

        return IndexIO.loadIndex(IndexMerger.append(adapters, mergedFile, indexSpec));
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }
}

From source file: io.druid.server.http.CoordinatorDynamicConfigsResource.java

License: Apache License

@GET
@Path("/history")
@Produces(MediaType.APPLICATION_JSON)
public Response getDatasourceRuleHistory(@QueryParam("interval") final String interval) {
    Interval theInterval = interval == null ? null : new Interval(interval);
    return Response.ok(auditManager.fetchAuditHistory(CoordinatorDynamicConfig.CONFIG_KEY,
            CoordinatorDynamicConfig.CONFIG_KEY, theInterval)).build();
}

From source file: io.druid.server.http.DatasourcesResource.java

License: Apache License

@DELETE
@Path("/{dataSourceName}")
@Produces(MediaType.APPLICATION_JSON)
public Response deleteDataSource(@PathParam("dataSourceName") final String dataSourceName,
        @QueryParam("kill") final String kill, @QueryParam("interval") final String interval) {
    if (indexingServiceClient == null) {
        return Response.ok(ImmutableMap.of("error", "no indexing service found")).build();
    }
    if (kill != null && Boolean.valueOf(kill)) {
        try {
            indexingServiceClient.killSegments(dataSourceName, new Interval(interval));
        } catch (Exception e) {
            return Response.serverError().entity(
                    ImmutableMap.of("error", "Exception occurred. Are you sure you have an indexing service?"))
                    .build();
        }
    } else {
        if (!databaseSegmentManager.removeDatasource(dataSourceName)) {
            return Response.noContent().build();
        }
    }

    return Response.ok().build();
}

From source file: io.druid.server.http.DatasourcesResource.java

License: Apache License

@GET
@Path("/{dataSourceName}/intervals/{interval}")
@Produces(MediaType.APPLICATION_JSON)
public Response getSegmentDataSourceSpecificInterval(@PathParam("dataSourceName") String dataSourceName,
        @PathParam("interval") String interval, @QueryParam("simple") String simple,
        @QueryParam("full") String full) {
    final DruidDataSource dataSource = getDataSource(dataSourceName);
    final Interval theInterval = new Interval(interval.replace("_", "/"));

    if (dataSource == null) {
        return Response.noContent().build();
    }

    final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());
    if (full != null) {
        final Map<Interval, Map<String, Object>> retVal = Maps.newTreeMap(comparator);
        for (DataSegment dataSegment : dataSource.getSegments()) {
            if (theInterval.contains(dataSegment.getInterval())) {
                Map<String, Object> segments = retVal.get(dataSegment.getInterval());
                if (segments == null) {
                    segments = Maps.newHashMap();
                    retVal.put(dataSegment.getInterval(), segments);
                }

                Pair<DataSegment, Set<String>> val = getSegment(dataSegment.getIdentifier());
                segments.put(dataSegment.getIdentifier(),
                        ImmutableMap.of("metadata", val.lhs, "servers", val.rhs));
            }
        }

        return Response.ok(retVal).build();
    }

    if (simple != null) {
        final Map<Interval, Map<String, Object>> retVal = Maps.newHashMap();
        for (DataSegment dataSegment : dataSource.getSegments()) {
            if (theInterval.contains(dataSegment.getInterval())) {
                Map<String, Object> properties = retVal.get(dataSegment.getInterval());
                if (properties == null) {
                    properties = Maps.newHashMap();
                    properties.put("size", dataSegment.getSize());
                    properties.put("count", 1);

                    retVal.put(dataSegment.getInterval(), properties);
                } else {
                    properties.put("size", MapUtils.getLong(properties, "size", 0L) + dataSegment.getSize());
                    properties.put("count", MapUtils.getInt(properties, "count", 0) + 1);
                }
            }
        }

        return Response.ok(retVal).build();
    }

    final Set<String> retVal = Sets.newTreeSet(Comparators.inverse(String.CASE_INSENSITIVE_ORDER));
    for (DataSegment dataSegment : dataSource.getSegments()) {
        if (theInterval.contains(dataSegment.getInterval())) {
            retVal.add(dataSegment.getIdentifier());
        }
    }

    return Response.ok(retVal).build();
}

From source file: io.druid.server.http.IntervalsResource.java

License: Apache License

@GET
@Path("/{interval}")
@Produces(MediaType.APPLICATION_JSON)
public Response getSpecificIntervals(@PathParam("interval") String interval,
        @QueryParam("simple") String simple, @QueryParam("full") String full,
        @Context final HttpServletRequest req) {
    final Interval theInterval = new Interval(interval.replace("_", "/"));
    final Set<DruidDataSource> datasources = authConfig.isEnabled()
            ? InventoryViewUtils.getSecuredDataSources(serverInventoryView,
                    (AuthorizationInfo) req.getAttribute(AuthConfig.DRUID_AUTH_TOKEN))
            : InventoryViewUtils.getDataSources(serverInventoryView);

    final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());

    if (full != null) {
        final Map<Interval, Map<String, Map<String, Object>>> retVal = Maps.newTreeMap(comparator);
        for (DruidDataSource dataSource : datasources) {
            for (DataSegment dataSegment : dataSource.getSegments()) {
                if (theInterval.contains(dataSegment.getInterval())) {
                    Map<String, Map<String, Object>> dataSourceInterval = retVal.get(dataSegment.getInterval());
                    if (dataSourceInterval == null) {
                        Map<String, Map<String, Object>> tmp = Maps.newHashMap();
                        retVal.put(dataSegment.getInterval(), tmp);
                    }
                    setProperties(retVal, dataSource, dataSegment);
                }
            }
        }

        return Response.ok(retVal).build();
    }

    if (simple != null) {
        final Map<Interval, Map<String, Object>> retVal = Maps.newHashMap();
        for (DruidDataSource dataSource : datasources) {
            for (DataSegment dataSegment : dataSource.getSegments()) {
                if (theInterval.contains(dataSegment.getInterval())) {
                    Map<String, Object> properties = retVal.get(dataSegment.getInterval());
                    if (properties == null) {
                        properties = Maps.newHashMap();
                        properties.put("size", dataSegment.getSize());
                        properties.put("count", 1);

                        retVal.put(dataSegment.getInterval(), properties);
                    } else {
                        properties.put("size",
                                MapUtils.getLong(properties, "size", 0L) + dataSegment.getSize());
                        properties.put("count", MapUtils.getInt(properties, "count", 0) + 1);
                    }
                }
            }
        }

        return Response.ok(retVal).build();
    }

    final Map<String, Object> retVal = Maps.newHashMap();
    for (DruidDataSource dataSource : datasources) {
        for (DataSegment dataSegment : dataSource.getSegments()) {
            if (theInterval.contains(dataSegment.getInterval())) {
                retVal.put("size", MapUtils.getLong(retVal, "size", 0L) + dataSegment.getSize());
                retVal.put("count", MapUtils.getInt(retVal, "count", 0) + 1);
            }
        }
    }

    return Response.ok(retVal).build();
}

From source file: io.druid.server.http.RulesResource.java

License: Apache License

@GET
@Path("/{dataSourceName}/history")
@Produces(MediaType.APPLICATION_JSON)
public Response getDatasourceRuleHistory(@PathParam("dataSourceName") final String dataSourceName,
        @QueryParam("interval") final String interval) {
    Interval theInterval = interval == null ? null : new Interval(interval);
    return Response.ok(auditManager.fetchAuditHistory(dataSourceName, "rules", theInterval)).build();
}

From source file: io.druid.server.http.RulesResource.java

License: Apache License

@GET
@Path("/history")
@Produces(MediaType.APPLICATION_JSON)
public Response getDatasourceRuleHistory(@QueryParam("interval") final String interval) {
    try {
        Interval theInterval = interval == null ? null : new Interval(interval);
        return Response.ok(auditManager.fetchAuditHistory("rules", theInterval)).build();
    } catch (IllegalArgumentException e) {
        return Response.serverError().entity(ImmutableMap.<String, Object>of("error", e.getMessage())).build();
    }
}

From source file: io.imply.druid.query.ExampleMain.java

License: Apache License

public static void main(String[] args) throws Exception {
    final String host = args.length == 0 ? "localhost:8082" : args[0];
    try (final DruidClient druidClient = DruidClient.create(host)) {
        // Create a simple select query using the Druids query builder.
        final int threshold = 50;
        final SelectQuery selectQuery = Druids.newSelectQueryBuilder().dataSource("wikiticker")
                .intervals(ImmutableList.of(new Interval("1000/3000")))
                .filters(new AndDimFilter(
                        ImmutableList.<DimFilter>of(new SelectorDimFilter("countryName", "United States", null),
                                new SelectorDimFilter("cityName", "San Francisco", null))))
                .dimensions(ImmutableList.of("page", "user")).pagingSpec(new PagingSpec(null, threshold))
                .build();

        // Fetch the results.
        final long startTime = System.currentTimeMillis();
        final Sequence<Result<SelectResultValue>> resultSequence = druidClient.execute(selectQuery);
        final List<Result<SelectResultValue>> resultList = Sequences.toList(resultSequence,
                Lists.<Result<SelectResultValue>>newArrayList());
        final long fetchTime = System.currentTimeMillis() - startTime;

        // Print the results.
        int resultCount = 0;
        for (final Result<SelectResultValue> result : resultList) {
            for (EventHolder eventHolder : result.getValue().getEvents()) {
                System.out.println(eventHolder.getEvent());
                resultCount++;
            }
        }

        // Print statistics.
        System.out.println(String.format("Fetched %,d rows in %,dms.", resultCount, fetchTime));
    }
}