Example usage for org.joda.time.format DateTimeFormat forPattern

List of usage examples for org.joda.time.format DateTimeFormat forPattern

Introduction

On this page you can find example usages for org.joda.time.format DateTimeFormat forPattern.

Prototype

public static DateTimeFormatter forPattern(String pattern) 

Document

Factory to create a formatter from a pattern string.
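
A minimal, self-contained sketch of the basic call (the pattern string and sample timestamp are illustrative, not taken from the examples below):

import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

public class ForPatternExample {
    public static void main(String[] args) {
        // forPattern builds an immutable, thread-safe formatter that can be created once and reused.
        DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss");

        // Parse a string into a DateTime, then print it back with the same formatter.
        DateTime parsed = formatter.parseDateTime("2016-03-01 12:30:45");
        System.out.println(formatter.print(parsed)); // prints 2016-03-01 12:30:45
    }
}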

Usage

From source file:com.linkedin.cubert.utils.DateTimeUtilities.java

License:Open Source License

public static DateTime getDateTime(String ts, String formatStr) {
    DateTimeFormatter formatter = DateTimeFormat.forPattern(formatStr);
    DateTime result = DateTime.parse(ts, formatter);
    return result;
}
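
A hypothetical call to this helper (the timestamp and pattern values are illustrative):

    DateTime dt = DateTimeUtilities.getDateTime("20160301", "yyyyMMdd");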

From source file:com.linkedin.cubert.utils.FileSystemUtils.java

License:Open Source License

public static List<Path> getPaths(FileSystem fs, JsonNode json, boolean schemaOnly, JsonNode params)
        throws IOException {
    if (json.isArray()) {
        List<Path> paths = new ArrayList<Path>();
        // If the specified input is an array, recursively get paths for each item in the array
        ArrayNode anode = (ArrayNode) json;
        for (int i = 0; i < anode.size(); i++) {
            paths.addAll(getPaths(fs, json.get(i), params));
        }
        return paths;
    } else if (json.isTextual()) {
        return getPaths(fs, new Path(json.getTextValue()));
    } else {
        List<Path> paths = new ArrayList<Path>();
        Path root = new Path(getText(json, "root"));
        Path basePath = root;
        JsonNode startDateJson = json.get("startDate");
        if (schemaOnly && json.get("origStartDate") != null)
            startDateJson = json.get("origStartDate");

        JsonNode endDateJson = json.get("endDate");
        if (startDateJson == null || endDateJson == null) {
            throw new IllegalArgumentException("StartDate and endDate need to be specified");
        }
        String startDuration, endDuration;
        if (startDateJson.isTextual()) {
            startDuration = startDateJson.getTextValue();
            endDuration = endDateJson.getTextValue();
        }

        else {
            startDuration = startDateJson.toString();
            endDuration = endDateJson.toString();
        }

        boolean errorOnMissing = false;
        JsonNode errorOnMissingJson = params.get("errorOnMissing");
        if (errorOnMissingJson != null)
            errorOnMissing = Boolean.parseBoolean(errorOnMissingJson.getTextValue());

        boolean useHourlyForMissingDaily = false;
        JsonNode useHourlyForMissingDailyJson = params.get("useHourlyForMissingDaily");
        if (useHourlyForMissingDailyJson != null)
            useHourlyForMissingDaily = Boolean.parseBoolean(useHourlyForMissingDailyJson.getTextValue());

        DateTimeFormatter dtf = DateTimeFormat.forPattern("yyyyMMdd");
        DateTimeFormatter dtfwHour = DateTimeFormat.forPattern("yyyyMMddHH");
        DateTime startDate, endDate;
        boolean isDaily;
        int hourStep;
        if (startDuration.length() == 8) {
            if (endDuration.length() != 8)
                throw new IllegalArgumentException(
                        "EndDate " + endDuration + " is not consistent with StartDate " + startDuration);
            startDate = dtf.parseDateTime(startDuration);
            endDate = dtf.parseDateTime(endDuration);
            isDaily = true;
            hourStep = 24;
        } else if (startDuration.length() == 10) {
            if (endDuration.length() != 10)
                throw new IllegalArgumentException(
                        "EndDate " + endDuration + " is not consistent with StartDate " + startDuration);
            startDate = dtfwHour.parseDateTime(startDuration);
            endDate = dtfwHour.parseDateTime(endDuration);
            isDaily = false;
            hourStep = 1;
        } else {
            throw new IllegalArgumentException(
                    "Cannot parse StartDate " + startDuration + " as daily or hourly duration");

        }

        for (Path path : getPaths(fs, root)) {
            if (isDaily) {
                if (path.getName().equals("daily"))
                    basePath = path;
                else
                    basePath = new Path(path, "daily");
            } else {
                if (path.getName().equals("hourly"))
                    basePath = path;
                else
                    basePath = new Path(path, "hourly");
            }

            //If daily folder itself doesn't exist
            if (!fs.exists(basePath) && isDaily && useHourlyForMissingDaily
                    && fs.exists(new Path(basePath.getParent(), "hourly"))) {
                basePath = new Path(basePath.getParent(), "hourly");
                endDate = endDate.plusHours(23);
                isDaily = false;
                hourStep = 1;
            }

            paths.addAll(getDurationPaths(fs, basePath, startDate, endDate, isDaily, hourStep, errorOnMissing,
                    useHourlyForMissingDaily));
        }

        if (paths.isEmpty() && schemaOnly)
            throw new IOException(String.format("No input files at %s from %s to %s", basePath.toString(),
                    startDuration, endDuration));
        return paths;
    }

}

From source file:com.linkedin.pinot.common.data.DateTimeFormatPatternSpec.java

License:Apache License

public DateTimeFormatPatternSpec(String timeFormat, String sdfPatternWithTz) {
    _timeFormat = TimeFormat.valueOf(timeFormat);
    if (_timeFormat.equals(TimeFormat.SIMPLE_DATE_FORMAT)) {
        Matcher m = SDF_PATTERN_WITH_TIMEZONE.matcher(sdfPatternWithTz);
        _sdfPattern = sdfPatternWithTz;
        if (m.find()) {
            _sdfPattern = m.group(SDF_PATTERN_GROUP).trim();
            String timezoneString = m.group(TIMEZONE_GROUP).trim();
            _dateTimeZone = DateTimeZone.forTimeZone(TimeZone.getTimeZone(timezoneString));
        }
        _dateTimeFormatter = DateTimeFormat.forPattern(_sdfPattern).withZone(_dateTimeZone);
    }
}

From source file:com.linkedin.pinot.core.indexsegment.generator.SegmentGeneratorConfig.java

License:Apache License

public void setSimpleDateFormat(@Nonnull String simpleDateFormat) {
    _timeColumnType = TimeColumnType.SIMPLE_DATE;
    try {
        DateTimeFormat.forPattern(simpleDateFormat);
    } catch (Exception e) {
        throw new RuntimeException("Illegal simple date format specification", e);
    }
    _simpleDateFormat = simpleDateFormat;
}
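
Note that forPattern is used here purely for validation: it throws IllegalArgumentException when the pattern string is invalid, and the returned formatter is discarded. A standalone sketch of that idiom (the method name is illustrative):

static void validateSimpleDateFormat(String pattern) {
    try {
        // forPattern throws IllegalArgumentException for an invalid pattern string.
        DateTimeFormat.forPattern(pattern);
    } catch (Exception e) {
        throw new RuntimeException("Illegal simple date format specification", e);
    }
}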

From source file:com.linkedin.pinot.core.segment.creator.impl.SegmentColumnarIndexCreator.java

License:Apache License

void writeMetadata() throws ConfigurationException {
    PropertiesConfiguration properties = new PropertiesConfiguration(
            new File(_indexDir, V1Constants.MetadataKeys.METADATA_FILE_NAME));

    properties.setProperty(SEGMENT_CREATOR_VERSION, config.getCreatorVersion());
    properties.setProperty(SEGMENT_PADDING_CHARACTER, String.valueOf(V1Constants.Str.DEFAULT_STRING_PAD_CHAR));
    properties.setProperty(SEGMENT_NAME, segmentName);
    properties.setProperty(TABLE_NAME, config.getTableName());
    properties.setProperty(DIMENSIONS, config.getDimensions());
    properties.setProperty(METRICS, config.getMetrics());
    properties.setProperty(DATETIME_COLUMNS, config.getDateTimeColumnNames());
    properties.setProperty(TIME_COLUMN_NAME, config.getTimeColumnName());
    properties.setProperty(TIME_INTERVAL, "not_there");
    properties.setProperty(SEGMENT_TOTAL_RAW_DOCS, String.valueOf(totalRawDocs));
    properties.setProperty(SEGMENT_TOTAL_AGGREGATE_DOCS, String.valueOf(totalAggDocs));
    properties.setProperty(SEGMENT_TOTAL_DOCS, String.valueOf(totalDocs));
    properties.setProperty(STAR_TREE_ENABLED, String.valueOf(config.isEnableStarTreeIndex()));
    properties.setProperty(SEGMENT_TOTAL_ERRORS, String.valueOf(totalErrors));
    properties.setProperty(SEGMENT_TOTAL_NULLS, String.valueOf(totalNulls));
    properties.setProperty(SEGMENT_TOTAL_CONVERSIONS, String.valueOf(totalConversions));
    properties.setProperty(SEGMENT_TOTAL_NULL_COLS, String.valueOf(totalNullCols));

    StarTreeIndexSpec starTreeIndexSpec = config.getStarTreeIndexSpec();
    if (starTreeIndexSpec != null) {
        properties.setProperty(STAR_TREE_SPLIT_ORDER, starTreeIndexSpec.getDimensionsSplitOrder());
        properties.setProperty(STAR_TREE_MAX_LEAF_RECORDS, starTreeIndexSpec.getMaxLeafRecords());
        properties.setProperty(STAR_TREE_SKIP_STAR_NODE_CREATION_FOR_DIMENSIONS,
                starTreeIndexSpec.getSkipStarNodeCreationForDimensions());
        properties.setProperty(STAR_TREE_SKIP_MATERIALIZATION_CARDINALITY,
                starTreeIndexSpec.getSkipMaterializationCardinalityThreshold());
        properties.setProperty(STAR_TREE_SKIP_MATERIALIZATION_FOR_DIMENSIONS,
                starTreeIndexSpec.getSkipMaterializationForDimensions());
    }

    HllConfig hllConfig = config.getHllConfig();
    Map<String, String> derivedHllFieldToOriginMap = null;
    if (hllConfig != null) {
        properties.setProperty(SEGMENT_HLL_LOG2M, hllConfig.getHllLog2m());
        derivedHllFieldToOriginMap = hllConfig.getDerivedHllFieldToOriginMap();
    }

    // Write time related metadata (start time, end time, time unit)
    String timeColumn = config.getTimeColumnName();
    ColumnIndexCreationInfo timeColumnIndexCreationInfo = indexCreationInfoMap.get(timeColumn);
    if (timeColumnIndexCreationInfo != null) {
        // Use start/end time in config if defined
        if (config.getStartTime() != null && config.getEndTime() != null) {
            properties.setProperty(SEGMENT_START_TIME, config.getStartTime());
            properties.setProperty(SEGMENT_END_TIME, config.getEndTime());
        } else {
            Object minTime = timeColumnIndexCreationInfo.getMin();
            Object maxTime = timeColumnIndexCreationInfo.getMax();

            // Convert time value into millis since epoch for SIMPLE_DATE
            if (config.getTimeColumnType() == SegmentGeneratorConfig.TimeColumnType.SIMPLE_DATE) {
                DateTimeFormatter dateTimeFormatter = DateTimeFormat.forPattern(config.getSimpleDateFormat());
                properties.setProperty(SEGMENT_START_TIME, dateTimeFormatter.parseMillis(minTime.toString()));
                properties.setProperty(SEGMENT_END_TIME, dateTimeFormatter.parseMillis(maxTime.toString()));
            } else {
                properties.setProperty(SEGMENT_START_TIME, minTime);
                properties.setProperty(SEGMENT_END_TIME, maxTime);
            }
        }

        properties.setProperty(TIME_UNIT, config.getSegmentTimeUnit());
    }

    for (Map.Entry<String, String> entry : config.getCustomProperties().entrySet()) {
        properties.setProperty(entry.getKey(), entry.getValue());
    }

    for (Map.Entry<String, ColumnIndexCreationInfo> entry : indexCreationInfoMap.entrySet()) {
        String column = entry.getKey();
        ColumnIndexCreationInfo columnIndexCreationInfo = entry.getValue();
        SegmentDictionaryCreator dictionaryCreator = _dictionaryCreatorMap.get(column);
        int dictionaryElementSize = (dictionaryCreator != null) ? dictionaryCreator.getNumBytesPerEntry() : 0;

        // TODO: after fixing the server-side dependency on HAS_INVERTED_INDEX and deployed, set HAS_INVERTED_INDEX properly
        // The hasInvertedIndex flag in segment metadata is picked up in ColumnMetadata, and will be used during the query
        // plan phase. If it is set to false, then inverted indexes are not used in queries even if they are created via table
        // configs on segment load. So, we set it to true here for now, until we fix the server to update the value inside
        // ColumnMetadata, export information to the query planner that the inverted index available is current and can be used.
        //
        //    boolean hasInvertedIndex = invertedIndexCreatorMap.containsKey();
        boolean hasInvertedIndex = true;

        String hllOriginColumn = null;
        if (derivedHllFieldToOriginMap != null) {
            hllOriginColumn = derivedHllFieldToOriginMap.get(column);
        }

        addColumnMetadataInfo(properties, column, columnIndexCreationInfo, totalDocs, totalRawDocs,
                totalAggDocs, schema.getFieldSpecFor(column), _dictionaryCreatorMap.containsKey(column),
                dictionaryElementSize, hasInvertedIndex, hllOriginColumn);
    }

    properties.save();
}

From source file:com.linkedin.pinot.core.segment.creator.impl.SegmentIndexCreationDriverImpl.java

License:Apache License

private long convertSDFToMillis(final String colValue) {
    final String sdfFormatStr = config.getSimpleDateFormat();
    DateTimeFormatter sdfFormatter = DateTimeFormat.forPattern(sdfFormatStr);
    DateTime dateTime = DateTime.parse(colValue, sdfFormatter);
    return dateTime.getMillis();
}

From source file:com.lithium.yoda.DateString.java

License:Apache License

@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    Preconditions.checkPositionIndex(1, arguments.length);
    timeCop = (LongObjectInspector) arguments[0];
    outFormatter = DateTimeFormat.forPattern(
            ((WritableConstantStringObjectInspector) arguments[1]).getWritableConstantValue().toString());
    return PrimitiveObjectInspectorFactory.javaStringObjectInspector;
}

From source file:com.lithium.yoda.ToEpoch.java

License:Apache License

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    if (dateOi instanceof StringObjectInspector) {
        String dateStr = ((StringObjectInspector) dateOi).getPrimitiveJavaObject(arguments[0].get());
        DateTimeFormatter dtf;
        if (hasSecondArg) {
            if (constSecondArg == null) {
                dtf = DateTimeFormat.forPattern(formatOi.getPrimitiveJavaObject(arguments[1].get()));
            } else {
                dtf = DateTimeFormat.forPattern(constSecondArg);
            }
        } else {
            dtf = ISODateTimeFormat.dateTimeParser().withOffsetParsed();
        }
        return dtf.parseDateTime(dateStr).getMillis();
    } else if (dateOi instanceof TimestampObjectInspector) {
        Timestamp ts = ((TimestampObjectInspector) dateOi).getPrimitiveJavaObject(arguments[0].get());
        if (hasSecondArg) {
            DateTime dt;
            if (constSecondArg == null) {
                dt = new DateTime(ts.getTime(),
                        DateTimeZone.forID(formatOi.getPrimitiveJavaObject(arguments[1].get())));
            } else {
                dt = new DateTime(ts.getTime(), DateTimeZone.forID(constSecondArg));
            }
            return dt.getMillis();
        } else {
            return ts.getTime();
        }
    }

    return null;
}
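
When no pattern argument is supplied, this UDF falls back to Joda's generic ISO-8601 parser, ISODateTimeFormat.dateTimeParser(); withOffsetParsed() keeps any offset found in the input string on the resulting DateTime instead of converting it to the default time zone.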

From source file:com.loadtesting.showcase.springmvc.model.converter.JodaTimeConverter.java

License:Apache License

public DateTimeFormatter getFormat(DateTimeZone timeZone) {
    DateTimeFormatter result = map.get(timeZone);
    if (result == null) {
        DateTimeFormatter newValue = DateTimeFormat.forPattern(pattern).withZone(timeZone);
        result = map.putIfAbsent(timeZone, newValue);
        if (result == null) {
            result = newValue;
        }
    }
    return result;
}
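
The map here is presumably a ConcurrentMap keyed by time zone: putIfAbsent returns the value already stored (or null if the new entry won), so concurrent callers converge on a single formatter per zone. Joda-Time formatters are immutable and thread-safe, which is what makes sharing them this way safe.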

From source file:com.lyft.hive.serde.DynamoDbSerDe.java

License:Apache License

@Override
public void initialize(Configuration configuration, Properties tbl) throws SerDeException {
    String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
    String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);

    columnNames = Arrays.asList(columnNameProperty.split(","));
    columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
    assert columnNames.size() == columnTypes.size();
    numColumns = columnNames.size();

    String formatString = tbl.getProperty(INPUT_TIMESTAMP_FORMAT);
    if (formatString != null) {
        timestampFormat = DateTimeFormat.forPattern(formatString);
        LOG.warn("Setting timestamp format to: " + formatString);
    }

    /*
     * Constructing the row ObjectInspector:
     * The row consists of some set of primitive columns, each column will
     * be a java object of primitive type.
     */
    List<ObjectInspector> columnOIs = new ArrayList<ObjectInspector>(columnNames.size());
    for (int c = 0; c < numColumns; c++) {
        TypeInfo typeInfo = columnTypes.get(c);
        if (typeInfo instanceof PrimitiveTypeInfo) {
            PrimitiveTypeInfo pti = (PrimitiveTypeInfo) columnTypes.get(c);
            AbstractPrimitiveJavaObjectInspector oi = PrimitiveObjectInspectorFactory
                    .getPrimitiveJavaObjectInspector(pti);
            columnOIs.add(oi);
        } else {
            throw new SerDeException(getClass().getName() + " doesn't allow column [" + c + "] named "
                    + columnNames.get(c) + " with type " + columnTypes.get(c));
        }
    }

    // StandardStruct uses ArrayList to store the row.
    rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, columnOIs, null);
}