Example usage for org.joda.time Duration standardMinutes

Introduction

This page collects example usages of org.joda.time Duration.standardMinutes from open-source projects.

Prototype

public static Duration standardMinutes(long minutes) 

Document

Create a duration with the specified number of minutes assuming that there are the standard number of milliseconds in a minute.
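
As a minimal sketch of that contract (written for this page, not taken from the projects below): a standard minute is exactly 60,000 milliseconds, so the conversion is plain arithmetic.

import org.joda.time.Duration;

public static void demo() {
    Duration twoMinutes = Duration.standardMinutes(2); // 2 * 60,000 ms
    System.out.println(twoMinutes.getMillis());          // prints 120000
    System.out.println(twoMinutes.getStandardSeconds()); // prints 120
}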

Usage

From source file: org.obm.push.utils.DateUtils.java

License: Open Source License

public static int minutesToSeconds(long minutes) {
    return Ints.checkedCast(Duration.standardMinutes(minutes).getStandardSeconds());
}
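
Note that getStandardSeconds() converts back assuming the standard 60 seconds per minute, and Guava's Ints.checkedCast fails fast with an IllegalArgumentException, rather than silently truncating, if the result does not fit in an int.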

From source file: org.smartdeveloperhub.harvesters.it.backend.crawler.jira.factories.IssueFactory.java

License: Apache License

/**
 * This method creates an {@link Issue} from a Jira issue.
 * @param jiraIssue the Jira issue to retrieve information from
 * @param contributors map of contributors indexed by id
 * @return the created {@link Issue}
 */
public Issue createIssue(com.atlassian.jira.rest.client.api.domain.Issue jiraIssue,
        Map<String, Contributor> contributors) {

    Issue issue = new Issue();

    issue.setId(String.valueOf(jiraIssue.getKey()));
    issue.setProjectId(jiraIssue.getProject().getKey());
    issue.setCreationDate(jiraIssue.getCreationDate());
    issue.setDescription(jiraIssue.getDescription());
    issue.setReporter(jiraIssue.getReporter().getName());
    issue.setName(jiraIssue.getSummary());

    User assignee = jiraIssue.getAssignee();

    if (assignee != null) {

        issue.setAssignee(assignee.getName());
    }

    // Prepare structures to explore changes looking for open and close dates.
    Stack<DateTime> openDate = new Stack<>();
    Stack<DateTime> closeDate = new Stack<>();

    issue.setChanges(createChangeLog(jiraIssue, contributors, openDate, closeDate));
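    // Note: peek() throws EmptyStackException if createChangeLog pushed no
    // dates, so at least one open and one close change is assumed here.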
    issue.setOpened(openDate.peek());
    issue.setClosed(closeDate.peek());
    issue.setDueTo(jiraIssue.getDueDate());
    TimeTracking track = jiraIssue.getTimeTracking();
    if (track != null) {

        Integer originalEstimatedMin = track.getOriginalEstimateMinutes();
        if (originalEstimatedMin != null) {

            issue.setEstimatedTime(Duration.standardMinutes(originalEstimatedMin));
        } else {
            LOGGER.info("No original estimated time for issue {}", issue.getId());
        }
    } else {
        LOGGER.info("No time tracking available for issue {}", issue.getId());
    }
    issue.setStatus(createStatus(jiraIssue));
    issue.setPriority(fromMap(jiraIssue.getPriority().getName(), priorityMapping));
    issue.setSeverity(fromMap(jiraIssue.getPriority().getName(), severityMapping));
    issue.setType(fromMap(jiraIssue.getIssueType().getName(), typeMapping));

    issue.setVersions(getVersions(jiraIssue));
    issue.setComponents(getComponents(jiraIssue));

    issue.setChildIssues(getChildIssuesById(jiraIssue));
    issue.setBlockedIssues(getBlockedIssuesById(jiraIssue));
    issue.setTags(jiraIssue.getLabels());

    // TODO: not available.
    //      issue.setCommits(commits);

    return issue;
}

From source file: org.smartdeveloperhub.harvesters.it.testing.generator.ProjectActivityGenerator.java

License: Apache License

private Duration estimateEffort(final LocalDateTime start, final LocalDateTime dueTo) {
    final Days daysBetween = Days.daysBetween(start, dueTo);
    int workingDays = 0;
    for (int i = 0; i < daysBetween.getDays(); i++) {
        if (Utils.isWorkingDay(start.toLocalDate().plusDays(i))) {
            workingDays++;
        }
    }
    final int maxMinutes = workingDays * this.workDay.effortPerDay();
    final double ratio = (100 + this.random.nextInt(900)) / 1000d;
    Duration result = Duration.standardMinutes(
            33 * maxMinutes / 100 + DoubleMath.roundToInt(67 * maxMinutes / 100 * ratio, RoundingMode.CEILING));
    if (result.isShorterThan(MINIMUM_EFFORT)) {
        result = MINIMUM_EFFORT;
    }
    return result;
}
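
In effect, the generator draws an effort between roughly 40% and 100% of the working-day minutes available before the due date, rounded up, and never below MINIMUM_EFFORT.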

From source file: PartnerTraining.Exercise11Part2.java

License: Apache License

public static void main(String[] args) {
    Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).withValidation().create());

    String filePath = "gs://deft-foegler/";
    if (p.getOptions().getRunner().getSimpleName().equals("DirectPipelineRunner")) {
        // The location of small test files on your local machine
        filePath = "/Users/foegler/Documents/";
    } else {
        // Your staging location or any other cloud storage location where you will upload files.
        filePath = "gs://deft-foegler/";
    }

    // Read the log lines from file.
    p.apply(TextIO.Read.from(filePath + "package_log.txt"))
            // Parse the log lines into objects.
            .apply(ParDo.of(new PackageActivityInfo.ParseLine()))
            // Since bounded data sources do not contain timestamps, we need to
            // emit each element of the PCollection with its event time attached
            // as the timestamp.
            .apply(ParDo.of(new DoFn<PackageActivityInfo, PackageActivityInfo>() {
                public void processElement(ProcessContext c) {
                    // Extract the timestamp from the log entry we're currently processing.
                    Instant logTimeStamp = new Instant(((PackageActivityInfo) c.element()).getTime().getTime());
                    // Use outputWithTimestamp to emit the log entry with timestamp attached.
                    c.outputWithTimestamp(c.element(), logTimeStamp);
                }
            }))
            // Define an hour-long window for the data.
            .apply(Window.<PackageActivityInfo>into(FixedWindows.of(Duration.standardMinutes(60))))
            // Extract the location key from each object.
            .apply(WithKeys.of(new SerializableFunction<PackageActivityInfo, String>() {
                public String apply(PackageActivityInfo s) {
                    return s.getLocation();
                }
            }))
            // Count the objects from the same hour, per location.
            .apply(Count.<String, PackageActivityInfo>perKey())
            // Format the output.  We need a ParDo here because we need
            // access to the window time.
            .apply(ParDo.of(new FormatOutput()))
            // Report the results to a file.
            .apply(TextIO.Write.named("WritePerHourCounts").to(filePath + "per_hour_per_location_count.txt"));
    p.run();
}

From source file: PartnerTraining.Exercise11Part3.java

License: Apache License

public static void main(String[] args) {
    Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).withValidation().create());

    String filePath = "gs://deft-foegler/";
    if (p.getOptions().getRunner().getSimpleName().equals("DirectPipelineRunner")) {
        // The location of small test files on your local machine
        filePath = "/Users/foegler/Documents/";
    } else {
        // Your staging location or any other cloud storage location where you will upload files.
        filePath = "gs://deft-foegler/";
    }

    // Define the table schema for the BigQuery output table.
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName("location").setType("STRING"));
    fields.add(new TableFieldSchema().setName("count").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("timestamp").setType("TIMESTAMP"));
    TableSchema schema = new TableSchema().setFields(fields);

    // Read the log lines from file.
    p.apply(TextIO.Read.from(filePath + "package_log.txt"))
            // Parse the log lines into objects.
            .apply(ParDo.of(new PackageActivityInfo.ParseLine()))
            // Since bounded data sources do not contain timestamps, we need to
            // emit each element of the PCollection with its event time attached
            // as the timestamp.
            .apply(ParDo.of(new DoFn<PackageActivityInfo, PackageActivityInfo>() {
                public void processElement(ProcessContext c) {
                    // Extract the timestamp from the log entry we're currently processing.
                    Instant logTimeStamp = new Instant(((PackageActivityInfo) c.element()).getTime().getTime());
                    // Use outputWithTimestamp to emit the log entry with timestamp attached.
                    c.outputWithTimestamp(c.element(), logTimeStamp);
                }
            }))
            // Define an hour-long window for the data.
            .apply(Window.<PackageActivityInfo>into(FixedWindows.of(Duration.standardMinutes(60))))
            // Extract the location key from each object.
            .apply(WithKeys.of(new SerializableFunction<PackageActivityInfo, String>() {
                public String apply(PackageActivityInfo s) {
                    return s.getLocation();
                }
            }))
            // Count the objects from the same hour, per location.
            .apply(Count.<String, PackageActivityInfo>perKey())
            // Format the output.  We need a ParDo here because we need
            // access to the window time.
            .apply(ParDo.of(new WindowCountsToRows()))
            // Write the Table rows to the output table.  The dataset must already exist
            // before executing this command.  If you have not created it, use the BigQuery
            // UI in the Developers Console to create the dataset.
            //
            // With the option CREATE_IF_NEEDED, the table will be created if it doesn't
            // already exist.
            // Use the BigQuery Query UI to verify your export:
            // SELECT * FROM partner_training_dataset.package_info LIMIT 5;
            .apply(BigQueryIO.Write.named("BigQuery-Write")
                    .to("google.com:deft-testing-integration:partner_training_dataset.package_counts")
                    .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
                    .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_TRUNCATE).withSchema(schema));
    p.run();
}

From source file: PartnerTraining.Exercise13.java

License: Apache License

public static void main(String[] args) {
    PipelineOptions options = PipelineOptionsFactory.fromArgs(args).withValidation().create();
    // Convert to DataflowPipelineOptions and set streaming to true
    DataflowPipelineOptions dataflowOptions = options.as(DataflowPipelineOptions.class);
    dataflowOptions.setStreaming(true);
    // Create the pipeline with the new options
    Pipeline p = Pipeline.create(dataflowOptions);

    // The table to write to, in the form PROJECT:DATASET.TABLE
    String table = "google.com:deft-testing-integration:partner_training_dataset.package_counts_streaming";

    // Define the table schema for the BigQuery output table.
    List<TableFieldSchema> fields = new ArrayList<>();
    fields.add(new TableFieldSchema().setName("location").setType("STRING"));
    fields.add(new TableFieldSchema().setName("count").setType("INTEGER"));
    fields.add(new TableFieldSchema().setName("timestamp").setType("TIMESTAMP"));
    TableSchema schema = new TableSchema().setFields(fields);

    // Read in PackageActivityInfo objects. Here we just generate them on the fly
    // but a real pipeline might read them from PubSub or another unbounded source.
    p.apply(new GenericUnboundedSourceGenerator())
            // Define a one-minute window for the data.
            .apply(Window.<PackageActivityInfo>into(FixedWindows.of(Duration.standardMinutes(1))))
            // Extract the location key from each object.
            .apply(WithKeys.of(new SerializableFunction<PackageActivityInfo, String>() {
                public String apply(PackageActivityInfo s) {
                    return s.getLocation();
                }
            }))
            // Count the objects in the same one-minute window, per location.
            .apply(Count.<String, PackageActivityInfo>perKey())
            // Format the output.  We need a ParDo here because we need
            // access to the window time.
            .apply(ParDo.of(new WindowCountsToRows()))
            // Write the Table rows to the output table.  The dataset must already exist
            // before executing this command.  If you have not created it, use the BigQuery
            // UI in the Developers Console to create the dataset.
            //
            // With the option CREATE_IF_NEEDED, the table will be created if it doesn't
            // already exist.
            // WRITE_APPEND as we want to append results to the table as we go.
            // Use the BigQuery Query UI to verify your export:
            // SELECT * FROM partner_training_dataset.package_info LIMIT 5;
            .apply(BigQueryIO.Write.named("BigQuery-Write").to(table)
                    .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
                    .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND).withSchema(schema));
    p.run();
}