List of usage examples for java.util.concurrent.TimeUnit.DAYS

TimeUnit.DAYS
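TimeUnit.DAYS represents twenty-four-hour days and is the coarsest constant in java.util.concurrent.TimeUnit. Before the project examples below, a minimal self-contained sketch of the conversions it supports (standard JDK API only; the class name is ours):

import java.util.concurrent.TimeUnit;

public class TimeUnitDaysDemo {
    public static void main(String[] args) {
        // One day expressed in smaller units.
        System.out.println(TimeUnit.DAYS.toHours(1));   // 24
        System.out.println(TimeUnit.DAYS.toMillis(1));  // 86400000

        // Convert the other way: milliseconds to whole days (fraction truncated).
        System.out.println(TimeUnit.MILLISECONDS.toDays(90_000_000L)); // 1

        // Generic conversion: how many DAYS fit in 48 hours?
        System.out.println(TimeUnit.DAYS.convert(48, TimeUnit.HOURS)); // 2
    }
}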
From source file: org.apache.metron.dataloads.bulk.ElasticsearchDataPrunerIntegrationTest.java
@Before
public void setUp() throws Exception {
    super.setUp();
    ensureGreen();
    TimeZone timeZone = TimeZone.getTimeZone("UTC");
    Calendar calendar = Calendar.getInstance(timeZone);
    calendar.set(Calendar.HOUR_OF_DAY, 0);
    calendar.set(Calendar.MINUTE, 0);
    calendar.set(Calendar.SECOND, 0);
    testingDate = calendar.getTime();
    yesterday.setTime(testingDate.getTime() - TimeUnit.DAYS.toMillis(1));
    dateFormat.setTimeZone(timeZone);
    File resourceFile = new File(TestConstants.SAMPLE_CONFIG_PATH);
    Path resourcePath = Paths.get(resourceFile.getCanonicalPath());
    configuration = new Configuration(resourcePath);
}
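The test above derives yesterday by subtracting TimeUnit.DAYS.toMillis(1) from a midnight timestamp. A stand-alone sketch of the same idiom (plain JDK, not Metron code):

import java.util.Calendar;
import java.util.Date;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;

public class YesterdayDemo {
    public static void main(String[] args) {
        Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
        calendar.set(Calendar.HOUR_OF_DAY, 0);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        calendar.set(Calendar.MILLISECOND, 0);
        Date todayMidnight = calendar.getTime();

        // Same idiom as the test above: step back exactly one day in millis.
        Date yesterdayMidnight = new Date(todayMidnight.getTime() - TimeUnit.DAYS.toMillis(1));
        System.out.println(todayMidnight + " -> " + yesterdayMidnight);
    }
}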
From source file: com.linkedin.pinot.core.util.CrcUtilsTest.java
private String makeSegmentAndReturnPath() throws Exception {
    final String filePath = TestUtils.getFileFromResourceUrl(
            ChunkIndexCreationDriverImplTest.class.getClassLoader().getResource(AVRO_DATA));
    final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
            new File(filePath), INDEX_DIR, "daysSinceEpoch", TimeUnit.DAYS, "testTable");
    config.setSegmentNamePostfix("1");
    config.setTimeColumnName("daysSinceEpoch");
    final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
    driver.init(config);
    driver.build();
    return new File(INDEX_DIR, driver.getSegmentName()).getAbsolutePath();
}
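Here TimeUnit.DAYS declares that the daysSinceEpoch time column stores whole days. Converting such a value back to epoch milliseconds is a single call; the column value below is hypothetical:

import java.util.concurrent.TimeUnit;

public class DaysSinceEpochDemo {
    public static void main(String[] args) {
        int daysSinceEpoch = 17000; // hypothetical value from the time column
        long epochMillis = TimeUnit.DAYS.toMillis(daysSinceEpoch);
        System.out.println(epochMillis); // 1468800000000
    }
}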
From source file: edu.indiana.soic.ts.mapreduce.VectorCalculator.java
public void submitJob() {
    try {
        Configuration config = HBaseConfiguration.create();
        config.set("mapreduce.output.textoutputformat.separator", ",");
        TreeMap<String, List<Date>> genDates = TableUtils.genDates(TableUtils.getDate(startDate),
                TableUtils.getDate(endDate), this.window, TimeUnit.DAYS, this.headShift, this.tailShift,
                TimeUnit.DAYS);
        LOG.info("Start Date : {} End Date : {}, Gen dates size: {}", startDate, endDate, genDates.size());
        for (String id : genDates.keySet()) {
            LOG.info("Vector calculation for: {}", id);
            Scan scan = new Scan();
            scan.setCaching(500); // 1 is the default in Scan, which will be bad for MapReduce jobs
            scan.setCacheBlocks(false); // don't set to true for MR jobs
            List<Date> dates = genDates.get(id);
            String start = TableUtils.convertDateToString(dates.get(0));
            String end = TableUtils.convertDateToString(dates.get(1));
            List<String> suitableDateList = TableUtils.getDates(start, end);
            config.set(Constants.Job.NO_OF_DAYS, String.valueOf(suitableDateList.size()));
            LOG.info("Vector calculator for start: {}, end: {} time window: {}, shift: {}, days: {}",
                    startDate, endDate, window, headShift, suitableDateList.size());
            for (String date : suitableDateList) {
                scan.addColumn(Constants.STOCK_TABLE_CF_BYTES, date.getBytes());
            }
            Job job = new Job(config, "Vector calculation: " + id);
            job.setJarByClass(VectorCalculator.class);
            TableMapReduceUtil.initTableMapperJob(
                    Constants.STOCK_TABLE_NAME,   // input HBase table name
                    scan,                         // Scan instance to control CF and attribute selection
                    VectorCalculatorMapper.class, // mapper
                    IntWritable.class,            // mapper output key
                    Text.class,                   // mapper output value
                    job);
            // adjust directories as required
            String outPutDir = tsConfiguration.getInterMediateVectorDir() + "/" + id;
            FileOutputFormat.setOutputPath(job, new Path(outPutDir));
            boolean b = job.waitForCompletion(true);
            if (!b) {
                LOG.error("Error with job for vector calculation");
                throw new RuntimeException("Error with job for vector calculation");
            }
            Utils.concatOutput(config, id, outPutDir, tsConfiguration.getVectorDir());
        }
    } catch (ParseException e) {
        LOG.error("Error while parsing date", e);
        throw new RuntimeException("Error while parsing date", e);
    } catch (InterruptedException | ClassNotFoundException | IOException e) {
        LOG.error("Error while creating the job", e);
        throw new RuntimeException("Error while creating the job", e);
    }
}
From source file: Main.java
/**
 * This method returns the number of milliseconds (UTC time) for today's date at midnight in
 * the local time zone. For example, if you live in California and the day is September 20th,
 * 2016 and it is 6:30 PM, it will return 1474329600000. Now, if you plug this number into an
 * Epoch time converter, you may be confused that it tells you this time stamp represents 8:00
 * PM on September 19th local time, rather than September 20th. We're concerned with the GMT
 * date here though, which is correct, stating September 20th, 2016 at midnight.
 *
 * As another example, if you are in Hong Kong and the day is September 20th, 2016 and it is
 * 6:30 PM, this method will return 1474329600000. Again, if you plug this number into an Epoch
 * time converter, you won't get midnight for your local time zone. Just keep in mind that we
 * are just looking at the GMT date here.
 *
 * This method will ALWAYS return the date at midnight (in GMT time) for the time zone you
 * are currently in. In other words, the GMT date will always represent your date.
 *
 * Since UTC / GMT time is the standard for all time zones in the world, we use it to
 * normalize our dates that are stored in the database. When we extract values from the
 * database, we adjust for the current time zone using time zone offsets.
 *
 * @return The number of milliseconds (UTC / GMT) for today's date at midnight in the local
 *         time zone
 */
public static long getNormalizedUtcDateForToday() {
    /*
     * This number represents the number of milliseconds that have elapsed since January
     * 1st, 1970 at midnight in the GMT time zone.
     */
    long utcNowMillis = System.currentTimeMillis();

    /*
     * This TimeZone represents the device's current time zone. It provides us with a means
     * of acquiring the offset for local time from a UTC time stamp.
     */
    TimeZone currentTimeZone = TimeZone.getDefault();

    /*
     * The getOffset method returns the number of milliseconds to add to UTC time to get the
     * elapsed time since the epoch for our current time zone. We pass the current UTC time
     * into this method so it can determine changes to account for daylight savings time.
     */
    long gmtOffsetMillis = currentTimeZone.getOffset(utcNowMillis);

    /*
     * UTC time is measured in milliseconds from January 1, 1970 at midnight in the GMT
     * time zone. Depending on your time zone, the time since January 1, 1970 at midnight
     * (GMT) will be greater or smaller. This variable represents the number of milliseconds
     * since January 1, 1970 (GMT), adjusted to local time.
     */
    long timeSinceEpochLocalTimeMillis = utcNowMillis + gmtOffsetMillis;

    /* This method simply converts milliseconds to days, disregarding any fractional days */
    long daysSinceEpochLocal = TimeUnit.MILLISECONDS.toDays(timeSinceEpochLocalTimeMillis);

    /*
     * Finally, we convert back to milliseconds. This time stamp represents today's date at
     * midnight in GMT time. We will need to account for local time zone offsets when
     * extracting this information from the database.
     */
    long normalizedUtcMidnightMillis = TimeUnit.DAYS.toMillis(daysSinceEpochLocal);

    return normalizedUtcMidnightMillis;
}
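The pairing of MILLISECONDS.toDays and DAYS.toMillis is the heart of the method above. A stripped-down sketch of just that round-trip, with no time-zone offset applied:

import java.util.concurrent.TimeUnit;

public class NormalizedDateDemo {
    public static void main(String[] args) {
        long nowMillis = System.currentTimeMillis();

        // toDays truncates, so the time-of-day component is dropped...
        long daysSinceEpoch = TimeUnit.MILLISECONDS.toDays(nowMillis);

        // ...and converting back lands exactly on midnight GMT.
        long midnightUtcMillis = TimeUnit.DAYS.toMillis(daysSinceEpoch);

        System.out.println(nowMillis + " -> " + midnightUtcMillis);
        // midnightUtcMillis is always a multiple of 86_400_000.
    }
}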
From source file: org.jdal.text.PeriodFormatter.java
/**
 * Convert a period value and unit string to milliseconds.
 * @param value period value
 * @param unit time unit name (days, hours, minutes or seconds)
 * @return the value in milliseconds, or 0 if the unit is not recognized
 */
private long parse(long value, String unit) {
    if (DAYS.equalsIgnoreCase(unit))
        return TimeUnit.DAYS.toMillis(value);
    else if (HOURS.equalsIgnoreCase(unit))
        return TimeUnit.HOURS.toMillis(value);
    else if (MINUTES.equalsIgnoreCase(unit))
        return TimeUnit.MINUTES.toMillis(value);
    else if (SECONDS.equalsIgnoreCase(unit))
        return TimeUnit.SECONDS.toMillis(value);
    return 0;
}
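parse() is private, but its branches are plain TimeUnit conversions. The expected results for a period value of 2 in each supported unit:

import java.util.concurrent.TimeUnit;

public class PeriodMillisDemo {
    public static void main(String[] args) {
        // The same conversions parse() performs, spelled out:
        System.out.println(TimeUnit.DAYS.toMillis(2));    // 172800000
        System.out.println(TimeUnit.HOURS.toMillis(2));   // 7200000
        System.out.println(TimeUnit.MINUTES.toMillis(2)); // 120000
        System.out.println(TimeUnit.SECONDS.toMillis(2)); // 2000
    }
}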
From source file: com.linkedin.pinot.core.chunk.creator.impl.ChunkIndexCreationDriverImplTest.java
@BeforeClass
public void setUP() throws Exception {
    if (INDEX_DIR.exists()) {
        FileUtils.deleteQuietly(INDEX_DIR);
    }
    final String filePath = TestUtils.getFileFromResourceUrl(
            ChunkIndexCreationDriverImplTest.class.getClassLoader().getResource(AVRO_DATA));
    final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
            new File(filePath), INDEX_DIR, "daysSinceEpoch", TimeUnit.DAYS, "testTable");
    config.setSegmentNamePostfix("1");
    config.setTimeColumnName("daysSinceEpoch");
    final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
    driver.init(config);
    driver.build();
}
From source file: com.linkedin.pinot.core.startree.TestStarTreeSegmentCreator.java
@Test(enabled = true)
public void testCreation() throws Exception {
    BasicConfigurator.configure();
    final SegmentGeneratorConfig config = SegmentTestUtils.getSegmentGenSpecWithSchemAndProjectedColumns(
            avroFile, indexDir, "daysSinceEpoch", TimeUnit.DAYS, "testTable");
    config.setSegmentNamePostfix("1");
    config.setTimeColumnName("daysSinceEpoch");

    // Set the star tree index config
    StarTreeIndexSpec starTreeIndexSpec = new StarTreeIndexSpec();
    // starTreeIndexSpec.setSplitExcludes(Arrays.asList("D1", "daysSinceEpoch"));
    starTreeIndexSpec.setSplitExcludes(Arrays.asList("daysSinceEpoch"));
    starTreeIndexSpec.setMaxLeafRecords(4);
    config.getSchema().setStarTreeIndexSpec(starTreeIndexSpec);

    final SegmentIndexCreationDriver driver = SegmentCreationDriverFactory.get(null);
    driver.init(config);
    driver.build();
}
From source file: com.ryantenney.metrics.spring.reporter.AbstractScheduledReporterFactoryBean.java
/**
 * Parses a string representing a duration (e.g. 500ms, 30s, 5m, 1h)
 * and converts it to nanoseconds.
 * @param duration a string representing a duration
 * @return the duration in nanoseconds
 */
protected long convertDurationString(String duration) {
    final Matcher m = DURATION_STRING_PATTERN.matcher(duration);
    if (!m.matches()) {
        throw new IllegalArgumentException("Invalid duration string format");
    }

    final long sourceDuration = Long.parseLong(m.group(1));
    final String sourceUnitString = m.group(2);
    final TimeUnit sourceUnit;
    if ("ns".equalsIgnoreCase(sourceUnitString)) {
        sourceUnit = TimeUnit.NANOSECONDS;
    } else if ("us".equalsIgnoreCase(sourceUnitString)) {
        sourceUnit = TimeUnit.MICROSECONDS;
    } else if ("ms".equalsIgnoreCase(sourceUnitString)) {
        sourceUnit = TimeUnit.MILLISECONDS;
    } else if ("s".equalsIgnoreCase(sourceUnitString)) {
        sourceUnit = TimeUnit.SECONDS;
    } else if ("m".equalsIgnoreCase(sourceUnitString)) {
        sourceUnit = TimeUnit.MINUTES;
    } else if ("h".equalsIgnoreCase(sourceUnitString)) {
        sourceUnit = TimeUnit.HOURS;
    } else if ("d".equalsIgnoreCase(sourceUnitString)) {
        sourceUnit = TimeUnit.DAYS;
    } else {
        sourceUnit = TimeUnit.MILLISECONDS;
    }

    return sourceUnit.toNanos(sourceDuration);
}
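As a usage sketch, here is the "d" branch exercised end to end. The regex below is our own stand-in; the real DURATION_STRING_PATTERN is not shown in the excerpt:

import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class DurationStringDemo {
    // A plausible pattern for strings like "500ms" or "2d" (assumed, not the
    // actual DURATION_STRING_PATTERN from the class above).
    private static final Pattern PATTERN = Pattern.compile("^(\\d+)(ns|us|ms|s|m|h|d)$");

    public static void main(String[] args) {
        Matcher m = PATTERN.matcher("2d");
        if (m.matches()) {
            long value = Long.parseLong(m.group(1));
            // "d" selects TimeUnit.DAYS, as in the method above.
            System.out.println(TimeUnit.DAYS.toNanos(value)); // 172800000000000
        }
    }
}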
From source file: com.linkedin.pinot.core.startree.TestStarTreeIntegrationTest.java
@Test
public void testSimple() throws Exception {
    int numDimensions = 4;
    int numMetrics = 2;
    int ROWS = (int) MathUtils.factorial(numDimensions);
    final Schema schema = new Schema();
    for (int i = 0; i < numDimensions; i++) {
        String dimName = "d" + (i + 1);
        DimensionFieldSpec dimensionFieldSpec = new DimensionFieldSpec(dimName, DataType.STRING, true);
        schema.addField(dimName, dimensionFieldSpec);
    }
    schema.setTimeFieldSpec(new TimeFieldSpec("daysSinceEpoch", DataType.INT, TimeUnit.DAYS));
    for (int i = 0; i < numMetrics; i++) {
        String metricName = "m" + (i + 1);
        MetricFieldSpec metricFieldSpec = new MetricFieldSpec(metricName, DataType.INT);
        schema.addField(metricName, metricFieldSpec);
    }

    SegmentGeneratorConfig config = new SegmentGeneratorConfig(schema);
    config.setEnableStarTreeIndex(true);
    String tempOutputDir = "/tmp/star-tree-index";
    config.setOutDir(tempOutputDir);
    config.setFormat(FileFormat.AVRO);
    config.setSegmentName("testSimple");
    SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();

    final List<GenericRow> data = new ArrayList<>();
    for (int row = 0; row < ROWS; row++) {
        HashMap<String, Object> map = new HashMap<>();
        for (int i = 0; i < numDimensions; i++) {
            String dimName = schema.getDimensionFieldSpecs().get(i).getName();
            map.put(dimName, dimName + "-v" + row % (numDimensions - i));
        }
        // time
        map.put("daysSinceEpoch", 1);
        for (int i = 0; i < numMetrics; i++) {
            String metName = schema.getMetricFieldSpecs().get(i).getName();
            map.put(metName, 1);
        }
        GenericRow genericRow = new GenericRow();
        genericRow.init(map);
        data.add(genericRow);
    }

    RecordReader reader = createReader(schema, data);
    driver.init(config, reader);
    driver.build();

    ReadMode mode = ReadMode.heap;

    // query to test
    String[] metricNames = new String[] { "m1" };
    String query = "select sum(m1) from T";
    Pql2Compiler compiler = new Pql2Compiler();
    BrokerRequest brokerRequest = compiler.compileToBrokerRequest(query);
    IndexSegment segment = Loaders.IndexSegment.load(new File(tempOutputDir, driver.getSegmentName()), mode);
    FilterPlanNode planNode = new FilterPlanNode(segment, brokerRequest);
    Operator rawOperator = planNode.run();
    BlockDocIdIterator rawDocIdIterator = rawOperator.nextBlock().getBlockDocIdSet().iterator();
    double[] expectedSums = computeSum(segment, rawDocIdIterator, metricNames);
    System.out.println("expectedSums=" + Arrays.toString(expectedSums));

    // dump contents
    Iterator<GenericRow> rowIterator = ((IndexSegmentImpl) segment).iterator(0,
            segment.getSegmentMetadata().getTotalDocs());
    int counter = 0;
    while (rowIterator.hasNext()) {
        GenericRow genericRow = rowIterator.next();
        StringBuilder sb = new StringBuilder().append(counter++).append(": \t");
        for (String dimName : schema.getDimensionNames()) {
            sb.append(dimName).append(":").append(genericRow.getValue(dimName)).append(", ");
        }
        if (schema.getTimeColumnName() != null) {
            sb.append(schema.getTimeColumnName()).append(":")
                    .append(genericRow.getValue(schema.getTimeColumnName())).append(", ");
        }
        for (String metName : schema.getMetricNames()) {
            sb.append(metName).append(":").append(genericRow.getValue(metName)).append(", ");
        }
        System.out.println(sb);
    }

    StarTreeIndexOperator starTreeOperator = new StarTreeIndexOperator(segment, brokerRequest);
    starTreeOperator.open();
    BlockDocIdIterator starTreeDocIdIterator = starTreeOperator.nextBlock().getBlockDocIdSet().iterator();
    double[] actualSums = computeSum(segment, starTreeDocIdIterator, metricNames);
    System.out.println("actualSums=" + Arrays.toString(actualSums));
}
From source file: com.dsclab.loader.app.Loader.java
public static void load(Configs prop)
        throws SQLException, ClassNotFoundException, InterruptedException, ExecutionException {
    int readThread = prop.getReadThread();
    int writeThread = prop.getWriteThread();
    ExecutorService readExecutor = Executors.newFixedThreadPool(readThread);
    ExecutorService writeExecutor = Executors.newFixedThreadPool(writeThread);
    LOG.info("Start load: writeThread:" + writeThread + ", readThread:" + readThread);
    BlockingQueue<List<String>> contentQueue = new LinkedBlockingQueue<>();
    int tableCount = tableTask.size();
    int sum = 0;
    for (int i = 0; i < tableCount; i++) {
        sum = sum + tableTask.get(i).getTaskSqlList().size();
    }
    for (int i = 0; i < sum; i++) {
        readExecutor.submit(new ProducerThread(prop.getInputURL(), contentQueue));
        writeExecutor.submit(new ConsumerThread(prop.getOutputURL(), contentQueue));
    }
    readExecutor.shutdown();
    readExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
    System.out.println("[CHIA7712] read threads end");
    writeExecutor.shutdown();
    writeExecutor.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
    System.out.println("[CHIA7712] write threads end");
}
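Passing Long.MAX_VALUE with TimeUnit.DAYS, as load() does twice above, is the conventional way to make awaitTermination wait effectively forever. A minimal runnable sketch of that idiom:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class AwaitForeverDemo {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(2);
        pool.submit(() -> System.out.println("work"));
        pool.shutdown();
        // Long.MAX_VALUE days is an effectively unbounded timeout.
        pool.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
        System.out.println("all tasks finished");
    }
}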