Example usage for org.joda.time Instant Instant()

Introduction

This page collects usage examples for the org.joda.time Instant() no-argument constructor.

Prototype

public Instant() 

Document

Constructs an instance set to the current system millisecond time.
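
For example, a minimal sketch of the constructor in isolation (the class name InstantDemo is illustrative, not taken from the projects below):

import org.joda.time.Instant;

public class InstantDemo {
    public static void main(String[] args) {
        // new Instant() captures the current system time, in milliseconds
        // since the Unix epoch (UTC), at the moment of construction.
        Instant now = new Instant();
        System.out.println(now);             // ISO-8601 text, e.g. 2024-05-01T12:34:56.789Z
        System.out.println(now.getMillis()); // the underlying epoch milliseconds
    }
}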

Usage

From source file: app.sunstreak.yourpisd.net.Student.java

License: Open Source License

/**
 * Uses the internet on every call.
 * 
 * @throws JSONException
 */
public int[][] loadGradeSummary() throws JSONException {
    try {
        String classId = "" + classList.getJSONObject(0).getInt("enrollmentId");
        String termId = "" + classList.getJSONObject(0).getJSONArray("terms").getJSONObject(0).getInt("termId");

        String url = "https://gradebook.pisd.edu/Pinnacle/Gradebook/InternetViewer/GradeSummary.aspx?"
                + "&EnrollmentId=" + classId + "&TermId=" + termId + "&ReportType=0&StudentId=" + studentId;

        HTTPResponse summary = Request.sendGet(url, session.cookies);
        String response = summary.getData();
        int responseCode = summary.getResponseCode();

        if (responseCode != 200)
            System.out.println("Response code: " + responseCode);

        /*
         * puts averages in classList, under each term.
         */
        Element doc = Jsoup.parse(response);
        gradeSummary = Parser.gradeSummary(doc, classList);

        matchClasses(gradeSummary);

        for (int classIndex = 0; classIndex < gradeSummary.length; classIndex++) {
            int jsonIndex = classMatch[classIndex];
            JSONArray terms = classList.getJSONObject(jsonIndex).getJSONArray("terms");

            int firstTermIndex = 0;
            int lastTermIndex = 0;

            if (terms.length() == 8) {
                // Full year course
                firstTermIndex = 0;
                lastTermIndex = 7;
            } else if (terms.length() == 4) {
                if (terms.optJSONObject(0).optString("description").equals("1st Six Weeks")) {
                    // First semester course
                    firstTermIndex = 0;
                    lastTermIndex = 3;
                } else {
                    // Second semester course
                    firstTermIndex = 4;
                    lastTermIndex = 7;
                }
            }

            for (int termIndex = firstTermIndex; termIndex <= lastTermIndex; termIndex++) {
                int arrayLocation = termIndex > 3 ? termIndex + 2 : termIndex + 1;
                int average = gradeSummary[classIndex][arrayLocation];
                if (average != NO_GRADES_ENTERED)
                    classList.getJSONObject(jsonIndex).getJSONArray("terms")
                            .getJSONObject(termIndex - firstTermIndex).put("average", average);
            }

            classList.getJSONObject(jsonIndex).put("firstSemesterAverage", gradeSummary[classIndex][5]);
            classList.getJSONObject(jsonIndex).put("secondSemesterAverage", gradeSummary[classIndex][10]);
        }

        // Last updated time of summary --> goes in this awkward place
        classList.getJSONObject(0).put("summaryLastUpdated", new Instant().getMillis());

        return gradeSummary;
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    } catch (JSONException e) {
        e.printStackTrace();
        return null;
    }
}

From source file: app.sunstreak.yourpisd.net.Student.java

License: Open Source License

public JSONObject getClassGrade(int classIndex, int termIndex) throws JSONException {

    String html = "";

    int classId = gradeSummary[classIndex][0];
    int termIndexOffset = 0;
    if (gradeSummary[classIndex][3] == CLASS_DISABLED_DURING_TERM)
        termIndexOffset = 4;

    termIndex -= termIndexOffset;

    if (hasClassGrade(classIndex, termIndex + termIndexOffset))
        return classGrades.get(classIndex).optJSONArray("terms").optJSONObject(termIndex);

    try {

        int termId = getTermIds(classId)[termIndex];

        html = getDetailedReport(classId, termId, studentId);

    } catch (IOException e) {
        e.printStackTrace();
    } catch (JSONException e) {
        e.printStackTrace();
    }

    // Parse the teacher name if not already there.
    try {
        classList.getJSONObject(classIndex).getString("teacher");
    } catch (JSONException e) {
        // Teacher was not found.
        String[] teacher = Parser.teacher(html);
        try {
            classList.getJSONObject(classIndex).put("teacher", teacher[0]);
            classList.getJSONObject(classIndex).put("teacherEmail", teacher[1]);
        } catch (JSONException f) {
            f.printStackTrace();
        }
    }

    JSONObject classGrade;

    try {
        classGrade = new JSONObject(classList.getJSONObject(getClassMatch()[classIndex]).toString());

        JSONArray termGrades = Parser.detailedReport(html);
        Object[] termCategory = Parser.termCategoryGrades(html);

        JSONArray termCategoryGrades = (JSONArray) termCategory[0];
        if ((Integer) termCategory[1] != -1)
            classGrade.getJSONArray("terms").getJSONObject(termIndex).put("average", termCategory[1]);

        classGrade.getJSONArray("terms").getJSONObject(termIndex).put("grades", termGrades);
        classGrade.getJSONArray("terms").getJSONObject(termIndex).put("categoryGrades", termCategoryGrades);

        // Record when this report was fetched.
        Instant in = new Instant();
        classGrade.getJSONArray("terms").getJSONObject(termIndex).put("lastUpdated", in.getMillis());

        if (classGrades.indexOfKey(classIndex) < 0)
            classGrades.put(classIndex, classGrade);

        return classGrade.getJSONArray("terms").getJSONObject(termIndex);

    } catch (JSONException e) {
        System.err.println("Error: Class index = " + classIndex + "; JSON index = "
                + getClassMatch()[classIndex] + "; Term index = " + termIndex + ".");
        e.printStackTrace();
        return null;
    }

}

From source file: com.amediamanager.scheduled.ElasticTranscoderTasks.java

License: Apache License

protected void handleMessage(final Message message) {
    try {
        LOG.info("Handling message received from checkStatus");
        ObjectNode snsMessage = (ObjectNode) mapper.readTree(message.getBody());
        ObjectNode notification = (ObjectNode) mapper.readTree(snsMessage.get("Message").asText());
        String state = notification.get("state").asText();
        String jobId = notification.get("jobId").asText();
        String pipelineId = notification.get("pipelineId").asText();
        Video video = videoService.findByTranscodeJobId(jobId);
        if (video == null) {
            LOG.warn("Unable to process result for job {} because it does not exist.", jobId);
            Instant msgTime = Instant.parse(snsMessage.get("Timestamp").asText());
            if (Minutes.minutesBetween(msgTime, new Instant()).getMinutes() > 20) {
                LOG.error("Job {} has not been found for over 20 minutes, deleting message from queue", jobId);
                deleteMessage(message);
            }
            // Leave it on the queue for now.
            return;
        }
        if ("ERROR".equals(state)) {
            LOG.warn("Job {} for pipeline {} failed to complete. Body: \n{}", jobId, pipelineId,
                    notification.get("messageDetails").asText());
            video.setThumbnailKey(videoService.getDefaultVideoPosterKey());
            videoService.save(video);
        } else {
            // Construct our url prefix: https://bucketname.s3.amazonaws.com/output/key/
            String prefix = notification.get("outputKeyPrefix").asText();
            if (!prefix.endsWith("/")) {
                prefix += "/";
            }

            ObjectNode output = ((ObjectNode) ((ArrayNode) notification.get("outputs")).get(0));
            String previewFilename = prefix + output.get("key").asText();
            String thumbnailFilename = prefix
                    + output.get("thumbnailPattern").asText().replaceAll("\\{count\\}", "00002") + ".png";
            video.setPreviewKey(previewFilename);
            video.setThumbnailKey(thumbnailFilename);
            videoService.save(video);
        }
        deleteMessage(message);
    } catch (JsonProcessingException e) {
        LOG.error("JSON exception handling notification: {}", message.getBody(), e);
    } catch (IOException e) {
        LOG.error("IOException handling notification: {}", message.getBody(), e);
    }
}

From source file: com.auth10.federation.SamlTokenValidator.java

License: Open Source License

private boolean validateExpiration(Instant notBefore, Instant notOnOrAfter) {

    Instant now = new Instant();
    Duration skew = new Duration(MAX_CLOCK_SKEW_IN_MINUTES * 60 * 1000);

    if (now.plus(skew).isBefore(notBefore)) {
        return true;
    }

    if (now.minus(skew).isAfter(notOnOrAfter)) {
        return true;
    }

    return false;
}

From source file: com.cloudera.api.ApiUtils.java

License: Apache License

public static Instant newInstantFromString(String value) {
    if (value.equalsIgnoreCase(Parameters.DATE_TIME_NOW)) {
        return new Instant();
    }

    return new Instant(DATE_TIME_PARSER.parseMillis(value));
}

From source file: com.example.geomesa.kafka.KafkaQuickStart.java

License: Open Source License

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaQuickStart";
    final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);

    System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
    System.in.read();

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    Instant replayStart = new Instant();
    addSimpleFeatures(sft, producerFS);
    Instant replayEnd = new Instant();

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    SimpleFeatureCollection featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    addDeleteNewFeature(sft, producerFS);

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    // the state of the two SimpleFeatures is real time here
    System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:");
    SimpleFeatureIterator featureIterator = featureCollection.features();
    SimpleFeature feature1 = featureIterator.next();
    SimpleFeature feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    // REPLAY CONSUMER - will obtain the state of SimpleFeatures at any specified time
    // Replay consumer requires a ReplayConfig which takes a time range and a
    // duration of time to process
    System.out.println("\nConsuming with the replay consumer...");
    Duration readBehind = new Duration(1000); // 1 second readBehind
    ReplayConfig rc = new ReplayConfig(replayStart, replayEnd, readBehind);
    SimpleFeatureType replaySFT = KafkaDataStoreHelper.createReplaySFT(preppedOutputSft, rc);
    producerDS.createSchema(replaySFT);
    SimpleFeatureSource replayConsumerFS = consumerDS.getFeatureSource(replaySFT.getName());

    // querying for the state of SimpleFeatures approximately 5 seconds before the replayEnd.
    // the ReplayKafkaConsumerFeatureSource will build the state of SimpleFeatures
    // by processing all of the messages that were sent in between queryTime-readBehind and queryTime.
    // only the messages in between replayStart and replayEnd are cached.
    Instant queryTime = replayEnd.minus(5000);
    featureCollection = replayConsumerFS.getFeatures(ReplayTimeHelper.toFilter(queryTime));
    System.out.println(featureCollection.size() + " features were written to Kafka");

    System.out.println("Here are the two SimpleFeatures that were obtained with the replay consumer:");
    featureIterator = featureCollection.features();
    feature1 = featureIterator.next();
    feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    if (System.getProperty("clear") != null) {
        // Run Java command with -Dclear=true
        // This will cause a 'clear'
        producerFS.removeFeatures(Filter.INCLUDE);
    }

    System.exit(0);
}

From source file: com.example.geomesa.kafka08.KafkaQuickStart.java

License: Open Source License

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaQuickStart08";
    final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);
    if (!cmd.hasOption("automated")) {
        System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
        System.in.read();
    }

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    Instant replayStart = new Instant();

    String vis = cmd.getOptionValue(VISIBILITY);
    if (vis != null)
        System.out.println("Writing features with " + vis);
    addSimpleFeatures(sft, producerFS, vis);
    Instant replayEnd = new Instant();

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    SimpleFeatureCollection featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    addDeleteNewFeature(sft, producerFS);

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    // the state of the two SimpleFeatures is real time here
    System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:");
    SimpleFeatureIterator featureIterator = featureCollection.features();
    SimpleFeature feature1 = featureIterator.next();
    SimpleFeature feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    // REPLAY CONSUMER - will obtain the state of SimpleFeatures at any specified time
    // Replay consumer requires a ReplayConfig which takes a time range and a
    // duration of time to process
    System.out.println("\nConsuming with the replay consumer...");
    Duration readBehind = new Duration(1000); // 1 second readBehind
    ReplayConfig rc = new ReplayConfig(replayStart, replayEnd, readBehind);
    SimpleFeatureType replaySFT = KafkaDataStoreHelper.createReplaySFT(preppedOutputSft, rc);
    producerDS.createSchema(replaySFT);
    SimpleFeatureSource replayConsumerFS = consumerDS.getFeatureSource(replaySFT.getName());

    // querying for the state of SimpleFeatures approximately 5 seconds before the replayEnd.
    // the ReplayKafkaConsumerFeatureSource will build the state of SimpleFeatures
    // by processing all of the messages that were sent in between queryTime-readBehind and queryTime.
    // only the messages in between replayStart and replayEnd are cached.
    Instant queryTime = replayEnd.minus(5000);
    featureCollection = replayConsumerFS.getFeatures(ReplayTimeHelper.toFilter(queryTime));
    System.out.println(featureCollection.size() + " features were written to Kafka");

    System.out.println("Here are the two SimpleFeatures that were obtained with the replay consumer:");
    featureIterator = featureCollection.features();
    feature1 = featureIterator.next();
    feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    if (System.getProperty("clear") != null) {
        // Run Java command with -Dclear=true
        // This will cause a 'clear'
        producerFS.removeFeatures(Filter.INCLUDE);
    }

    System.exit(0);
}

From source file: com.example.geomesa.kafka09.KafkaQuickStart.java

License: Open Source License

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaQuickStart09";
    final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);
    if (!cmd.hasOption("automated")) {
        System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
        System.in.read();
    }

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    Instant replayStart = new Instant();

    String vis = cmd.getOptionValue(VISIBILITY);
    if (vis != null)
        System.out.println("Writing features with " + vis);
    addSimpleFeatures(sft, producerFS, vis);
    Instant replayEnd = new Instant();

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    SimpleFeatureCollection featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    addDeleteNewFeature(sft, producerFS);

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    // the state of the two SimpleFeatures is real time here
    System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:");
    SimpleFeatureIterator featureIterator = featureCollection.features();
    SimpleFeature feature1 = featureIterator.next();
    SimpleFeature feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    // REPLAY CONSUMER - will obtain the state of SimpleFeatures at any specified time
    // Replay consumer requires a ReplayConfig which takes a time range and a
    // duration of time to process
    System.out.println("\nConsuming with the replay consumer...");
    Duration readBehind = new Duration(1000); // 1 second readBehind
    ReplayConfig rc = new ReplayConfig(replayStart, replayEnd, readBehind);
    SimpleFeatureType replaySFT = KafkaDataStoreHelper.createReplaySFT(preppedOutputSft, rc);
    producerDS.createSchema(replaySFT);
    SimpleFeatureSource replayConsumerFS = consumerDS.getFeatureSource(replaySFT.getName());

    // querying for the state of SimpleFeatures approximately 5 seconds before the replayEnd.
    // the ReplayKafkaConsumerFeatureSource will build the state of SimpleFeatures
    // by processing all of the messages that were sent in between queryTime-readBehind and queryTime.
    // only the messages in between replayStart and replayEnd are cached.
    Instant queryTime = replayEnd.minus(5000);
    featureCollection = replayConsumerFS.getFeatures(ReplayTimeHelper.toFilter(queryTime));
    System.out.println(featureCollection.size() + " features were written to Kafka");

    System.out.println("Here are the two SimpleFeatures that were obtained with the replay consumer:");
    featureIterator = featureCollection.features();
    feature1 = featureIterator.next();
    feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    if (System.getProperty("clear") != null) {
        // Run Java command with -Dclear=true
        // This will cause a 'clear'
        producerFS.removeFeatures(Filter.INCLUDE);
    }

    System.exit(0);
}

From source file: com.example.geomesa.kafka10.KafkaQuickStart.java

License: Open Source License

public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    dsConf.put("isProducer", "true");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("isProducer", "false");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    // create the schema which creates a topic in Kafka
    // (only needs to be done once)
    final String sftName = "KafkaQuickStart10";
    final String sftSchema = "name:String,age:Int,dtg:Date,*geom:Point:srid=4326";
    SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
    // set zkPath to default if not specified
    String zkPath = (dsConf.get(ZK_PATH) == null) ? "/geomesa/ds/kafka" : dsConf.get(ZK_PATH);
    SimpleFeatureType preppedOutputSft = KafkaDataStoreHelper.createStreamingSFT(sft, zkPath);
    // only create the schema if it hasn't been created already
    if (!Arrays.asList(producerDS.getTypeNames()).contains(sftName))
        producerDS.createSchema(preppedOutputSft);
    if (!cmd.hasOption("automated")) {
        System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
        System.in.read();
    }

    // the live consumer must be created before the producer writes features
    // in order to read streaming data.
    // i.e. the live consumer will only read data written after its instantiation
    SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);
    SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);

    // creates and adds SimpleFeatures to the producer every 1/5th of a second
    System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
    Instant replayStart = new Instant();

    String vis = cmd.getOptionValue(VISIBILITY);
    if (vis != null)
        System.out.println("Writing features with " + vis);
    addSimpleFeatures(sft, producerFS, vis);
    Instant replayEnd = new Instant();

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    SimpleFeatureCollection featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    addDeleteNewFeature(sft, producerFS);

    // read from Kafka after writing all the features.
    // LIVE CONSUMER - will obtain the current state of SimpleFeatures
    System.out.println("\nConsuming with the live consumer...");
    featureCollection = consumerFS.getFeatures();
    System.out.println(featureCollection.size() + " features were written to Kafka");

    // the state of the two SimpleFeatures is real time here
    System.out.println("Here are the two SimpleFeatures that were obtained with the live consumer:");
    SimpleFeatureIterator featureIterator = featureCollection.features();
    SimpleFeature feature1 = featureIterator.next();
    SimpleFeature feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    // REPLAY CONSUMER - will obtain the state of SimpleFeatures at any specified time
    // Replay consumer requires a ReplayConfig which takes a time range and a
    // duration of time to process
    System.out.println("\nConsuming with the replay consumer...");
    Duration readBehind = new Duration(1000); // 1 second readBehind
    ReplayConfig rc = new ReplayConfig(replayStart, replayEnd, readBehind);
    SimpleFeatureType replaySFT = KafkaDataStoreHelper.createReplaySFT(preppedOutputSft, rc);
    producerDS.createSchema(replaySFT);
    SimpleFeatureSource replayConsumerFS = consumerDS.getFeatureSource(replaySFT.getName());

    // querying for the state of SimpleFeatures approximately 5 seconds before the replayEnd.
    // the ReplayKafkaConsumerFeatureSource will build the state of SimpleFeatures
    // by processing all of the messages that were sent in between queryTime-readBehind and queryTime.
    // only the messages in between replayStart and replayEnd are cached.
    Instant queryTime = replayEnd.minus(5000);
    featureCollection = replayConsumerFS.getFeatures(ReplayTimeHelper.toFilter(queryTime));
    System.out.println(featureCollection.size() + " features were written to Kafka");

    System.out.println("Here are the two SimpleFeatures that were obtained with the replay consumer:");
    featureIterator = featureCollection.features();
    feature1 = featureIterator.next();
    feature2 = featureIterator.next();
    featureIterator.close();
    printFeature(feature1);
    printFeature(feature2);

    if (System.getProperty("clear") != null) {
        // Run Java command with -Dclear=true
        // This will cause a 'clear'
        producerFS.removeFeatures(Filter.INCLUDE);
    }

    System.exit(0);
}

From source file: com.example.time.SystemClock.java

License: Open Source License

@Override
public Instant now() {
    return new Instant();
}