Example usage for java.time LocalDate ofEpochDay

Introduction

This page collects usage examples for java.time LocalDate.ofEpochDay.

Prototype

public static LocalDate ofEpochDay(long epochDay) 

Document

Obtains an instance of LocalDate from the epoch day count, where day 0 is 1970-01-01.
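
A minimal round-trip sketch (not from the indexed sources; the class name EpochDayRoundTrip is illustrative) showing that ofEpochDay is the inverse of LocalDate.toEpochDay:

import java.time.LocalDate;

public class EpochDayRoundTrip {
    public static void main(String[] args) {
        // Day 0 of the epoch day count is 1970-01-01
        System.out.println(LocalDate.ofEpochDay(0)); // 1970-01-01

        // toEpochDay followed by ofEpochDay returns the original date
        LocalDate today = LocalDate.now();
        long epochDay = today.toEpochDay();
        System.out.println(LocalDate.ofEpochDay(epochDay).equals(today)); // true
    }
}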

Usage

From source file:Main.java

public static void main(String[] args) {
    LocalDate a = LocalDate.ofEpochDay(1000); // 1,000 days after 1970-01-01

    System.out.println(a); // 1972-09-27
}

From source file:Main.java

public static void main(String[] args) {
    LocalDate localDate1 = LocalDate.of(2014, 5, 21);
    System.out.println(localDate1);

    LocalDate localDate2 = LocalDate.of(2014, Month.MARCH, 4);
    System.out.println(localDate2);

    // 2,014 days after the epoch date 1970-01-01
    LocalDate localDate3 = LocalDate.ofEpochDay(2014);
    System.out.println(localDate3); // 1975-07-08

    // Day 39 of the year 2014
    LocalDate localDate4 = LocalDate.ofYearDay(2014, 39);
    System.out.println(localDate4); // 2014-02-08
}

From source file:Main.java

public static void main(String[] args) {
    // Get the current local date
    LocalDate localDate1 = LocalDate.now();
    System.out.println(localDate1);
    // Create a specific local date
    LocalDate localDate2 = LocalDate.of(2014, Month.JUNE, 21);
    System.out.println(localDate2);
    // 10,000 days after the epoch date 1970-01-01
    LocalDate localDate3 = LocalDate.ofEpochDay(10000);
    System.out.println(localDate3); // 1997-05-19
}

From source file:com.splicemachine.orc.OrcTester.java

private static Object preprocessWriteValueOld(TypeInfo typeInfo, Object value) throws IOException {
    if (value == null) {
        return null;
    }
    switch (typeInfo.getCategory()) {
    case PRIMITIVE:
        PrimitiveObjectInspector.PrimitiveCategory primitiveCategory = ((PrimitiveTypeInfo) typeInfo)
                .getPrimitiveCategory();
        switch (primitiveCategory) {
        case BOOLEAN:
            return value;
        case BYTE:
            return ((Number) value).byteValue();
        case SHORT:
            return ((Number) value).shortValue();
        case INT:
            return ((Number) value).intValue();
        case LONG:
            return ((Number) value).longValue();
        case FLOAT:
            return ((Number) value).floatValue();
        case DOUBLE:
            return ((Number) value).doubleValue();
        case DECIMAL:
            return HiveDecimal.create(((Decimal) value).toBigDecimal().bigDecimal());
        case STRING:
            return value;
        case CHAR:
            return new HiveChar(value.toString(), ((CharTypeInfo) typeInfo).getLength());
        case DATE:
            // The DATE value arrives as an epoch day count
            LocalDate localDate = LocalDate.ofEpochDay((int) value);
            ZonedDateTime zonedDateTime = localDate.atStartOfDay(ZoneId.systemDefault());

            long millis = zonedDateTime.toEpochSecond() * 1000;
            Date date = new Date(0);
            // millis must be set separately to avoid masking
            date.setTime(millis);
            return date;
        case TIMESTAMP:
            long millisUtc = ((Long) value).intValue();
            return new Timestamp(millisUtc);
        case BINARY:
            return ((String) value).getBytes();
        //                        return (byte[])value;
        }
        break;
    case MAP:
        MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
        TypeInfo keyTypeInfo = mapTypeInfo.getMapKeyTypeInfo();
        TypeInfo valueTypeInfo = mapTypeInfo.getMapValueTypeInfo();
        Map<Object, Object> newMap = new HashMap<>();
        for (Entry<?, ?> entry : ((Map<?, ?>) value).entrySet()) {
            newMap.put(preprocessWriteValueOld(keyTypeInfo, entry.getKey()),
                    preprocessWriteValueOld(valueTypeInfo, entry.getValue()));
        }
        return newMap;
    case LIST:
        ListTypeInfo listTypeInfo = (ListTypeInfo) typeInfo;
        TypeInfo elementTypeInfo = listTypeInfo.getListElementTypeInfo();
        List<Object> newList = new ArrayList<>(((Collection<?>) value).size());
        for (Object element : (Iterable<?>) value) {
            newList.add(preprocessWriteValueOld(elementTypeInfo, element));
        }
        return newList;
    case STRUCT:
        StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
        List<?> fieldValues = (List<?>) value;
        List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
        List<Object> newStruct = new ArrayList<>();
        for (int fieldId = 0; fieldId < fieldValues.size(); fieldId++) {
            newStruct.add(preprocessWriteValueOld(fieldTypeInfos.get(fieldId), fieldValues.get(fieldId)));
        }
        return newStruct;
    }
    throw new IOException(format("Unsupported Hive type: %s", typeInfo));
}
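
The DATE branch above converts an epoch day count to a Date by going through the JVM's default time zone. A standalone sketch of the same idea using java.util.Date (class and method names are illustrative, not from OrcTester):

import java.time.LocalDate;
import java.time.ZoneId;
import java.util.Date;

public class EpochDayToDate {

    // Midnight at the start of the given epoch day, in the default zone
    public static Date toDate(long epochDay) {
        LocalDate localDate = LocalDate.ofEpochDay(epochDay);
        return Date.from(localDate.atStartOfDay(ZoneId.systemDefault()).toInstant());
    }

    public static void main(String[] args) {
        System.out.println(toDate(10000)); // the local-zone rendering of 1997-05-19
    }
}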

From source file:org.apache.nifi.processors.orc.PutORCTest.java

@Test
public void testWriteORCWithAvroLogicalTypes() throws IOException, InitializationException {
    final String avroSchema = IOUtils.toString(
            new FileInputStream("src/test/resources/user_logical_types.avsc"), StandardCharsets.UTF_8);
    schema = new Schema.Parser().parse(avroSchema);
    Calendar now = Calendar.getInstance();
    LocalTime nowTime = LocalTime.now();
    LocalDateTime nowDateTime = LocalDateTime.now();
    LocalDate epoch = LocalDate.ofEpochDay(0);
    LocalDate nowDate = LocalDate.now();

    final int timeMillis = nowTime.get(ChronoField.MILLI_OF_DAY);
    final Timestamp timestampMillis = Timestamp.valueOf(nowDateTime);
    final Date dt = Date.valueOf(nowDate);
    final double dec = 1234.56;

    configure(proc, 10, (numUsers, readerFactory) -> {
        for (int i = 0; i < numUsers; i++) {
            readerFactory.addRecord(i, timeMillis, timestampMillis, dt, dec);
        }
        return null;
    });

    final String filename = "testORCWithDefaults-" + System.currentTimeMillis();

    final Map<String, String> flowFileAttributes = new HashMap<>();
    flowFileAttributes.put(CoreAttributes.FILENAME.key(), filename);

    testRunner.setProperty(PutORC.HIVE_TABLE_NAME, "myTable");

    testRunner.enqueue("trigger", flowFileAttributes);
    testRunner.run();
    testRunner.assertAllFlowFilesTransferred(PutORC.REL_SUCCESS, 1);

    final Path orcFile = new Path(DIRECTORY + "/" + filename);

    // verify the successful flow file has the expected attributes
    final MockFlowFile mockFlowFile = testRunner.getFlowFilesForRelationship(PutORC.REL_SUCCESS).get(0);
    mockFlowFile.assertAttributeEquals(PutORC.ABSOLUTE_HDFS_PATH_ATTRIBUTE, orcFile.getParent().toString());
    mockFlowFile.assertAttributeEquals(CoreAttributes.FILENAME.key(), filename);
    mockFlowFile.assertAttributeEquals(PutORC.RECORD_COUNT_ATTR, "10");
    // DDL will be created with field names normalized (lowercased, e.g.) for Hive by default
    mockFlowFile.assertAttributeEquals(PutORC.HIVE_DDL_ATTRIBUTE,
            "CREATE EXTERNAL TABLE IF NOT EXISTS `myTable` (`id` INT, `timemillis` INT, `timestampmillis` TIMESTAMP, `dt` DATE, `dec` DOUBLE) STORED AS ORC");

    // verify we generated a provenance event
    final List<ProvenanceEventRecord> provEvents = testRunner.getProvenanceEvents();
    assertEquals(1, provEvents.size());

    // verify it was a SEND event with the correct URI
    final ProvenanceEventRecord provEvent = provEvents.get(0);
    assertEquals(ProvenanceEventType.SEND, provEvent.getEventType());
    // If it runs with a real HDFS, the protocol will be "hdfs://", but with a local filesystem, just assert the filename.
    Assert.assertTrue(provEvent.getTransitUri().endsWith(DIRECTORY + "/" + filename));

    // verify the content of the ORC file by reading it back in
    verifyORCUsers(orcFile, 10, (x, currUser) -> {
        assertEquals((int) currUser, ((IntWritable) x.get(0)).get());
        assertEquals(timeMillis, ((IntWritable) x.get(1)).get());
        assertEquals(timestampMillis, ((TimestampWritableV2) x.get(2)).getTimestamp().toSqlTimestamp());
        final DateFormat noTimeOfDayDateFormat = new SimpleDateFormat("yyyy-MM-dd");
        noTimeOfDayDateFormat.setTimeZone(TimeZone.getTimeZone("gmt"));
        assertEquals(noTimeOfDayDateFormat.format(dt), ((DateWritableV2) x.get(3)).get().toString());
        assertEquals(dec, ((DoubleWritable) x.get(4)).get(), Double.MIN_VALUE);
        return null;
    });

    // verify we don't have the temp dot file after success
    final File tempOrcFile = new File(DIRECTORY + "/." + filename);
    Assert.assertFalse(tempOrcFile.exists());

    // verify we DO have the CRC file after success
    final File crcAvroORCFile = new File(DIRECTORY + "/." + filename + ".crc");
    Assert.assertTrue(crcAvroORCFile.exists());
}

From source file:org.apache.tez.dag.history.logging.proto.DagManifesFileScanner.java

public DagManifesFileScanner(DatePartitionedLogger<ManifestEntryProto> manifestLogger) {
    this.manifestLogger = manifestLogger;
    this.syncTime = manifestLogger.getConfig().getLong(
            TezConfiguration.TEZ_HISTORY_LOGGING_PROTO_SYNC_WINDOWN_SECS,
            TezConfiguration.TEZ_HISTORY_LOGGING_PROTO_SYNC_WINDOWN_SECS_DEFAULT);
    this.withDoas = manifestLogger.getConfig().getBoolean(TezConfiguration.TEZ_HISTORY_LOGGING_PROTO_DOAS,
            TezConfiguration.TEZ_HISTORY_LOGGING_PROTO_DOAS_DEFAULT);
    this.setOffset(LocalDate.ofEpochDay(0)); // start scanning from the epoch (1970-01-01)
}

From source file:org.pentaho.hadoop.shim.common.format.avro.AvroNestedReader.java

/**
 * Perform Kettle type conversions for the Avro leaf field value.
 *
 * @param fieldValue the leaf value from the Avro structure
 * @return an Object of the appropriate Kettle type
 * @throws KettleException if a problem occurs
 */
protected Object getKettleValue(AvroInputField avroInputField, Object fieldValue) throws KettleException {

    switch (avroInputField.getTempValueMeta().getType()) {
    case ValueMetaInterface.TYPE_BIGNUMBER:
        return avroInputField.getTempValueMeta().getBigNumber(fieldValue);
    case ValueMetaInterface.TYPE_BINARY:
        return avroInputField.getTempValueMeta().getBinary(fieldValue);
    case ValueMetaInterface.TYPE_BOOLEAN:
        return avroInputField.getTempValueMeta().getBoolean(fieldValue);
    case ValueMetaInterface.TYPE_DATE:
        if (avroInputField.getAvroType().getBaseType() == AvroSpec.DataType.INTEGER.getBaseType()) {
            // The Avro value is a day count since the epoch (1970-01-01)
            LocalDate localDate = LocalDate.ofEpochDay(0).plusDays((Long) fieldValue);
            return Date.from(localDate.atStartOfDay(ZoneId.systemDefault()).toInstant());
        } else if (avroInputField.getAvroType().getBaseType() == AvroSpec.DataType.STRING.getBaseType()) {
            Object pentahoData = null;
            String dateFormatStr = avroInputField.getStringFormat();
            if ((dateFormatStr == null) || (dateFormatStr.trim().length() == 0)) {
                dateFormatStr = ValueMetaBase.DEFAULT_DATE_FORMAT_MASK;
            }
            SimpleDateFormat datePattern = new SimpleDateFormat(dateFormatStr);
            try {
                return datePattern.parse(fieldValue.toString());
            } catch (Exception e) {
                return null;
            }
        }
        return avroInputField.getTempValueMeta().getDate(fieldValue);
    case ValueMetaInterface.TYPE_TIMESTAMP:
        return new Timestamp((Long) fieldValue);
    case ValueMetaInterface.TYPE_INTEGER:
        return avroInputField.getTempValueMeta().getInteger(fieldValue);
    case ValueMetaInterface.TYPE_NUMBER:
        return avroInputField.getTempValueMeta().getNumber(fieldValue);
    case ValueMetaInterface.TYPE_STRING:
        return avroInputField.getTempValueMeta().getString(fieldValue);
    case ValueMetaInterface.TYPE_INET:
        try {
            return InetAddress.getByName(fieldValue.toString());
        } catch (UnknownHostException ex) {
            return null;
        }
    default:
        return null;
    }
}

From source file:tech.tablesaw.filters.TimeDependentFilteringTest.java

private static LocalDate randomDate() {
    Random random = new Random();
    int minDay = (int) LocalDate.of(2000, 1, 1).toEpochDay();
    int maxDay = (int) LocalDate.of(2016, 1, 1).toEpochDay();
    // Pick a uniformly distributed epoch day in [2000-01-01, 2016-01-01)
    long randomDay = minDay + random.nextInt(maxDay - minDay);
    return LocalDate.ofEpochDay(randomDay);
}