Example usage for org.joda.time DateTimeComparator getInstance

List of usage examples for org.joda.time DateTimeComparator getInstance

Introduction

On this page you can find example usage for org.joda.time DateTimeComparator getInstance.

Prototype

public static DateTimeComparator getInstance() 

Document

Returns a DateTimeComparator that compares the entire date-time value.
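
The comparator returned by getInstance() can be passed anywhere a java.util.Comparator is expected. Below is a minimal sketch (with hypothetical sample dates, assuming Joda-Time is on the classpath) that sorts a list of DateTime values by their full instant, which is the pattern several of the examples below use:

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import org.joda.time.DateTime;
import org.joda.time.DateTimeComparator;

public class DateTimeComparatorDemo {
    public static void main(String[] args) {
        // Hypothetical sample values; any DateTime instances work.
        List<DateTime> dates = Arrays.asList(new DateTime(2013, 6, 1, 12, 0, 0, 0),
                new DateTime(2012, 1, 15, 8, 30, 0, 0), new DateTime(2013, 6, 1, 9, 45, 0, 0));

        // Sort by the entire date-time value, earliest instant first.
        Collections.sort(dates, DateTimeComparator.getInstance());

        System.out.println(dates);
    }
}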

Usage

From source file:com.ebay.pulsar.analytics.metricstore.druid.query.DruidQueryProcessor.java

License:MIT License

@SuppressWarnings("unchecked")
private List<Map<String, Object>> postQuery(String druidRsp, DruidSpecs druidSpecs, boolean groupBy4TopN,
        boolean expandTopN) throws IOException, JsonParseException, JsonMappingException, ParseException,
        DataSourceConfigurationException, DataSourceException, SqlTranslationException,
        InvalidQueryParameterException {
    List<String> dimensions = druidSpecs.getDimensions();
    Set<String> hllSet = getAggregateHLL(druidSpecs.getAggregators());

    ObjectMapper mapper = new ObjectMapper();
    List<Map<String, Object>> listOfResult = mapper.readValue(druidRsp,
            new TypeReference<List<Map<String, Object>>>() {
            });

    List<ColumnValueCollector<String>> valueCollectorList = Lists.newArrayList();
    if (getResultEnrichers() != null) {
        for (Map.Entry<String, ResultEnricher> entry : getResultEnrichers().entrySet()) {
            valueCollectorList.add(new ColumnValueCollector<String>(entry.getKey(), Sets.<String>newHashSet()));
        }
    }

    List<Map<String, Object>> rsp = Lists.newArrayList();
    List<String> topNDimensions = null;
    if (expandTopN) {
        topNDimensions = resortTopN(druidSpecs, dimensions.get(0), listOfResult);
    }

    for (Map<String, Object> resultMap : listOfResult) {
        DateTime dt = PulsarDateTimeFormatter.OUTPUT_DRUID_TIME_FORMATTER
                .parseDateTime(resultMap.get(TIMESTAMP).toString());
        DateTime start = druidSpecs.getIntervals().getStart();
        int compareDate = DateTimeComparator.getInstance().compare(dt, start);
        if (compareDate < 0) {
            dt = start;
        }
        String dtMST = PulsarDateTimeFormatter.OUTPUTTIME_FORMATTER.print(dt);
        resultMap.put(TIMESTAMP, dtMST);

        // Reformat druid group by query result
        if (dimensions != null && (dimensions.size() > 1 || groupBy4TopN)) {
            resultMap.put(RESULT, resultMap.get("event"));
            resultMap.remove("event");
            resultMap.remove("version");
        }

        // Reformat Non-topN queries
        if (dimensions == null || dimensions.size() == 0 || dimensions.size() > 1 || groupBy4TopN) {
            Map<String, Object> result = (Map<String, Object>) resultMap.get(RESULT);
            Map<String, Object> reviseResult = reviseResult(dimensions, druidSpecs.getNameAliasMap(), hllSet,
                    valueCollectorList, result);
            resultMap.put(RESULT, reviseResult);
        } else {
            // Reformat real topN queries
            List<Map<String, Object>> resultList = (List<Map<String, Object>>) resultMap.get(RESULT);
            if (resultList == null || resultList.size() == 0) {
                Map<String, Object> newResultMap = Maps.newHashMap();
                newResultMap.put(TIMESTAMP, dtMST);
                newResultMap.put(RESULT, ImmutableMap.of());
                rsp.add(newResultMap);
            } else {
                for (Map<String, Object> result : resultList) {
                    if (topNDimensions != null && !topNDimensions
                            .contains(Strings.nullToEmpty((String) result.get(dimensions.get(0))))) {
                        // Only the real TopN dimension is kept
                        continue;
                    }

                    Map<String, Object> reviseResult = reviseResult(dimensions, druidSpecs.getNameAliasMap(),
                            hllSet, valueCollectorList, result);
                    Map<String, Object> newResultMap = Maps.newHashMap();
                    newResultMap.put(TIMESTAMP, dtMST);
                    newResultMap.put(RESULT, reviseResult);
                    rsp.add(newResultMap);
                }
            }
        }
    }

    if (dimensions != null && dimensions.size() == 1 && !groupBy4TopN)
        listOfResult = rsp;

    for (ColumnValueCollector<String> columnValueCollector : valueCollectorList) {
        if (getResultEnrichers() != null) {
            if (!columnValueCollector.getValueCollection().isEmpty()) {
                Map<String, ResultNode> enrichedResult = getResultEnrichers()
                        .get(columnValueCollector.getColumnName())
                        .enrich(columnValueCollector.getValueCollection());
                for (Map<String, Object> resultMap : listOfResult) {
                    if (resultMap.get(RESULT) != null) {
                        Map<String, Object> result = (Map<String, Object>) resultMap.get(RESULT);
                        if (result.get(columnValueCollector.getColumnName()) != null) {
                            ResultNode resultNode = (ResultNode) enrichedResult
                                    .get(result.get(columnValueCollector.getColumnName()));
                            result.put(resultNode.getName(), resultNode.getValue());
                        }
                    }
                }
            }
        }
    }
    return listOfResult;
}

From source file:com.manydesigns.portofino.pageactions.calendar.EventWeekComparator.java

License:Open Source License

public EventWeekComparator() {
    dateTimeComparator = DateTimeComparator.getInstance();
    stringComparator = new StringComparator();
}

From source file:com.manydesigns.portofino.pageactions.map.PositionComparator.java

License:Open Source License

public PositionComparator() {
    dateTimeComparator = DateTimeComparator.getInstance();
    stringComparator = new StringComparator();
}

From source file:com.metamx.druid.indexer.DeterminePartitionsJob.java

License:Open Source License

public boolean run() {
    try {
        /*
         * Group by (timestamp, dimensions) so we can correctly count dimension values as they would appear
         * in the final segment.
         */

        if (!config.getPartitionsSpec().isAssumeGrouped()) {
            final Job groupByJob = new Job(new Configuration(), String.format(
                    "%s-determine_partitions_groupby-%s", config.getDataSource(), config.getIntervals()));

            injectSystemProperties(groupByJob);
            groupByJob.setInputFormatClass(TextInputFormat.class);
            groupByJob.setMapperClass(DeterminePartitionsGroupByMapper.class);
            groupByJob.setMapOutputKeyClass(BytesWritable.class);
            groupByJob.setMapOutputValueClass(NullWritable.class);
            groupByJob.setCombinerClass(DeterminePartitionsGroupByReducer.class);
            groupByJob.setReducerClass(DeterminePartitionsGroupByReducer.class);
            groupByJob.setOutputKeyClass(BytesWritable.class);
            groupByJob.setOutputValueClass(NullWritable.class);
            groupByJob.setOutputFormatClass(SequenceFileOutputFormat.class);
            groupByJob.setJarByClass(DeterminePartitionsJob.class);

            config.addInputPaths(groupByJob);
            config.intoConfiguration(groupByJob);
            FileOutputFormat.setOutputPath(groupByJob, config.makeGroupedDataDir());

            groupByJob.submit();
            log.info("Job %s submitted, status available at: %s", groupByJob.getJobName(),
                    groupByJob.getTrackingURL());

            if (!groupByJob.waitForCompletion(true)) {
                log.error("Job failed: %s", groupByJob.getJobID());
                return false;
            }
        } else {
            log.info("Skipping group-by job.");
        }

        /*
         * Read grouped data and determine appropriate partitions.
         */
        final Job dimSelectionJob = new Job(new Configuration(), String.format(
                "%s-determine_partitions_dimselection-%s", config.getDataSource(), config.getIntervals()));

        dimSelectionJob.getConfiguration().set("io.sort.record.percent", "0.19");

        injectSystemProperties(dimSelectionJob);

        if (!config.getPartitionsSpec().isAssumeGrouped()) {
            // Read grouped data from the groupByJob.
            dimSelectionJob.setMapperClass(DeterminePartitionsDimSelectionPostGroupByMapper.class);
            dimSelectionJob.setInputFormatClass(SequenceFileInputFormat.class);
            FileInputFormat.addInputPath(dimSelectionJob, config.makeGroupedDataDir());
        } else {
            // Directly read the source data, since we assume it's already grouped.
            dimSelectionJob.setMapperClass(DeterminePartitionsDimSelectionAssumeGroupedMapper.class);
            dimSelectionJob.setInputFormatClass(TextInputFormat.class);
            config.addInputPaths(dimSelectionJob);
        }

        SortableBytes.useSortableBytesAsMapOutputKey(dimSelectionJob);
        dimSelectionJob.setMapOutputValueClass(Text.class);
        dimSelectionJob.setCombinerClass(DeterminePartitionsDimSelectionCombiner.class);
        dimSelectionJob.setReducerClass(DeterminePartitionsDimSelectionReducer.class);
        dimSelectionJob.setOutputKeyClass(BytesWritable.class);
        dimSelectionJob.setOutputValueClass(Text.class);
        dimSelectionJob.setOutputFormatClass(DeterminePartitionsDimSelectionOutputFormat.class);
        dimSelectionJob.setJarByClass(DeterminePartitionsJob.class);

        config.intoConfiguration(dimSelectionJob);
        FileOutputFormat.setOutputPath(dimSelectionJob, config.makeIntermediatePath());

        dimSelectionJob.submit();
        log.info("Job %s submitted, status available at: %s", dimSelectionJob.getJobName(),
                dimSelectionJob.getTrackingURL());

        if (!dimSelectionJob.waitForCompletion(true)) {
            log.error("Job failed: %s", dimSelectionJob.getJobID().toString());
            return false;
        }

        /*
         * Load partitions determined by the previous job.
         */

        log.info("Job completed, loading up partitions for intervals[%s].",
                config.getSegmentGranularIntervals());
        FileSystem fileSystem = null;
        Map<DateTime, List<HadoopyShardSpec>> shardSpecs = Maps.newTreeMap(DateTimeComparator.getInstance());
        int shardCount = 0;
        for (Interval segmentGranularity : config.getSegmentGranularIntervals()) {
            DateTime bucket = segmentGranularity.getStart();

            final Path partitionInfoPath = config.makeSegmentPartitionInfoPath(new Bucket(0, bucket, 0));
            if (fileSystem == null) {
                fileSystem = partitionInfoPath.getFileSystem(dimSelectionJob.getConfiguration());
            }
            if (fileSystem.exists(partitionInfoPath)) {
                List<ShardSpec> specs = config.jsonMapper.readValue(
                        Utils.openInputStream(dimSelectionJob, partitionInfoPath),
                        new TypeReference<List<ShardSpec>>() {
                        });

                List<HadoopyShardSpec> actualSpecs = Lists.newArrayListWithExpectedSize(specs.size());
                for (int i = 0; i < specs.size(); ++i) {
                    actualSpecs.add(new HadoopyShardSpec(specs.get(i), shardCount++));
                    log.info("DateTime[%s], partition[%d], spec[%s]", bucket, i, actualSpecs.get(i));
                }

                shardSpecs.put(bucket, actualSpecs);
            } else {
                log.info("Path[%s] didn't exist!?", partitionInfoPath);
            }
        }
        config.setShardSpecs(shardSpecs);

        return true;
    } catch (Exception e) {
        throw Throwables.propagate(e);
    }
}

From source file:com.metamx.druid.indexer.HadoopDruidIndexerJob.java

License:Open Source License

@Override
public boolean run() {
    List<Jobby> jobs = Lists.newArrayList();

    ensurePaths();

    if (config.partitionByDimension()) {
        jobs.add(new DeterminePartitionsJob(config));
    } else {
        Map<DateTime, List<HadoopyShardSpec>> shardSpecs = Maps.newTreeMap(DateTimeComparator.getInstance());
        int shardCount = 0;
        for (Interval segmentGranularity : config.getSegmentGranularIntervals()) {
            DateTime bucket = segmentGranularity.getStart();
            final HadoopyShardSpec spec = new HadoopyShardSpec(new NoneShardSpec(), shardCount++);
            shardSpecs.put(bucket, Lists.newArrayList(spec));
            log.info("DateTime[%s], spec[%s]", bucket, spec);
        }
        config.setShardSpecs(shardSpecs);
    }

    indexJob = new IndexGeneratorJob(config);
    jobs.add(indexJob);

    if (dbUpdaterJob != null) {
        jobs.add(dbUpdaterJob);
    } else {
        log.info("No updaterJobSpec set, not uploading to database");
    }

    String failedMessage = null;
    for (Jobby job : jobs) {
        if (failedMessage == null) {
            if (!job.run()) {
                failedMessage = String.format("Job[%s] failed!", job.getClass());
            }
        }
    }

    if (!config.isLeaveIntermediate()) {
        if (failedMessage == null || config.isCleanupOnFailure()) {
            Path workingPath = config.makeIntermediatePath();
            log.info("Deleting path[%s]", workingPath);
            try {
                workingPath.getFileSystem(new Configuration()).delete(workingPath, true);
            } catch (IOException e) {
                log.error(e, "Failed to cleanup path[%s]", workingPath);
            }
        }
    }

    if (failedMessage != null) {
        throw new ISE(failedMessage);
    }

    return true;
}

From source file:com.metamx.druid.indexing.coordinator.TaskRunnerWorkItem.java

License:Open Source License

@Override
public int compareTo(TaskRunnerWorkItem taskRunnerWorkItem) {
    return ComparisonChain.start()
            .compare(createdTime, taskRunnerWorkItem.getCreatedTime(), DateTimeComparator.getInstance())
            .compare(task.getId(), taskRunnerWorkItem.getTask().getId()).result();
}

From source file:com.metamx.druid.merger.coordinator.TaskRunnerWorkItem.java

License:Open Source License

@Override
public int compareTo(TaskRunnerWorkItem taskRunnerWorkItem) {
    return DateTimeComparator.getInstance().compare(createdTime, taskRunnerWorkItem.getCreatedTime());
}

From source file:com.sos.scheduler.model.objects.JSObjHolidaysWeekdays.java

License:Apache License

public List<DateTime> getDtHolidays(Interval timeRange) {
    List<DateTime> result = new ArrayList<DateTime>();
    Iterator<Day> it = getDay().iterator();
    while (it.hasNext()) {
        Day d = it.next();
        JSObjHolidaysWeekdaysDay day = new JSObjHolidaysWeekdaysDay(objFactory);
        day.setObjectFieldsFrom(d);
        result.addAll(day.getDtHolidays(timeRange));
    }
    Collections.sort(result, DateTimeComparator.getInstance());
    return result;
}

From source file:com.sos.scheduler.model.objects.JSObjHolidaysWeekdaysDay.java

License:Apache License

public List<DateTime> getDtHolidays(Interval timeRange) {
    List<DateTime> result = new ArrayList<DateTime>();
    List<DateTime> work = getNextSingleStarts(timeRange.getStart());
    for (DateTime date : work) {
        if (timeRange.contains(date)) {
            while (timeRange.contains(date)) {
                result.add(date);
                date = date.plusWeeks(1);
            }
        }
    }
    Collections.sort(result, DateTimeComparator.getInstance());
    return result;
}

From source file:com.sos.scheduler.model.objects.JSObjHolidaysWeekdaysDay.java

License:Apache License

private List<DateTime> getNextSingleStarts(DateTime baseDate) {
    DateTimeFormatter fmtDate = DateTimeFormat.forPattern("yyyy-MM-dd");
    DateTimeFormatter fmtDateTime = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss");
    List<DateTime> result = new ArrayList<DateTime>();
    logger.debug(getDay().size() + " day elements detected.");
    Iterator<String> it = getDay().iterator();
    while (it.hasNext()) {
        String dayString = it.next();
        logger.debug("parsing day string " + dayString);
        List<Integer> days = JodaTools.getJodaWeekdays(dayString);
        for (int i = 0; i < days.size(); i++) {
            DateTime nextWeekDay = JodaTools.getNextWeekday(baseDate, days.get(i));
            logger.debug("calculated date " + fmtDate.print(nextWeekDay));
            if (nextWeekDay.isBefore(baseDate)) {
                nextWeekDay = nextWeekDay.plusWeeks(1);
                logger.debug("start is corrected to " + fmtDateTime.print(nextWeekDay));
            }
            result.add(nextWeekDay);
        }
        Collections.sort(result, DateTimeComparator.getInstance());
    }
    return result;
}