Example usage for org.joda.time Duration Duration

List of usage examples for org.joda.time Duration Duration

Introduction

On this page you can find example usages of the org.joda.time Duration(Object) constructor.

Prototype

public Duration(Object duration) 

Source Link

Document

Creates a duration from the specified object using the org.joda.time.convert.ConverterManager.

Usage

From source file:edu.jhu.hlt.concrete.gigaword.expt.ConvertGigawordDocuments.java

License:Open Source License

/**
 * @param args// w w  w.j a v  a  2s .c om
 */
public static void main(String... args) {
    Thread.setDefaultUncaughtExceptionHandler(new UncaughtExceptionHandler() {

        @Override
        public void uncaughtException(Thread t, Throwable e) {
            logger.error("Thread {} caught unhandled exception.", t.getName());
            logger.error("Unhandled exception.", e);
        }
    });

    if (args.length != 2) {
        logger.info("Usage: {} {} {}", GigawordConcreteConverter.class.getName(), "path/to/expt/file",
                "path/to/out/folder");
        System.exit(1);
    }

    String exptPathStr = args[0];
    String outPathStr = args[1];

    // Verify path points to something.
    Path exptPath = Paths.get(exptPathStr);
    if (!Files.exists(exptPath)) {
        logger.error("File: {} does not exist. Re-run with the correct path to "
                + " the experiment 2 column file. See README.md.");
        System.exit(1);
    }

    logger.info("Experiment map located at: {}", exptPathStr);

    // Create output dir if not yet created.
    Path outPath = Paths.get(outPathStr);
    if (!Files.exists(outPath)) {
        logger.info("Creating directory: {}", outPath.toString());
        try {
            Files.createDirectories(outPath);
        } catch (IOException e) {
            logger.error("Caught an IOException when creating output dir.", e);
            System.exit(1);
        }
    }

    logger.info("Output directory located at: {}", outPathStr);

    // Read in expt map. See README.md.
    Map<String, Set<String>> exptMap = null;
    try (Reader r = ExperimentUtils.createReader(exptPath); BufferedReader br = new BufferedReader(r)) {
        exptMap = ExperimentUtils.createFilenameToIdMap(br);
    } catch (IOException e) {
        logger.error("Caught an IOException when creating expt map.", e);
        System.exit(1);
    }

    // Start a timer.
    logger.info("Gigaword -> Concrete beginning.");
    StopWatch sw = new StopWatch();
    sw.start();
    // Iterate over expt map.
    exptMap.entrySet()
            // .parallelStream()
            .forEach(p -> {
                final String pathStr = p.getKey();
                final Set<String> ids = p.getValue();
                final Path lp = Paths.get(pathStr);
                logger.info("Converting path: {}", pathStr);

                // Get the file name and immediate folder it is under.
                int nElements = lp.getNameCount();
                Path fileName = lp.getName(nElements - 1);
                Path subFolder = lp.getName(nElements - 2);
                String newFnStr = fileName.toString().split("\\.")[0] + ".tar";

                // Mirror folders in output dir.
                Path localOutFolder = outPath.resolve(subFolder);
                Path localOutPath = localOutFolder.resolve(newFnStr);

                // Create output subfolders.
                if (!Files.exists(localOutFolder) && !Files.isDirectory(localOutFolder)) {
                    logger.info("Creating out file: {}", localOutFolder.toString());
                    try {
                        Files.createDirectories(localOutFolder);
                    } catch (IOException e) {
                        throw new RuntimeException("Caught an IOException when creating output dir.", e);
                    }
                }

                // Iterate over communications.
                Iterator<Communication> citer;
                try (OutputStream os = Files.newOutputStream(localOutPath);
                        BufferedOutputStream bos = new BufferedOutputStream(os);
                        Archiver archiver = new TarArchiver(bos);) {
                    citer = new ConcreteGigawordDocumentFactory().iterator(lp);
                    while (citer.hasNext()) {
                        Communication c = citer.next();
                        String cId = c.getId();

                        // Document ID must be in the set. Remove.
                        boolean wasInSet = ids.remove(cId);
                        if (!wasInSet) {
                            // Some IDs are duplicated in Gigaword.
                            // See ERRATA.
                            logger.debug(
                                    "ID: {} was parsed from path: {}, but was not in the experiment map. Attempting to remove dupe.",
                                    cId, pathStr);

                            // Attempt to create a duplicate id (append .duplicate to the id).
                            // Then, try to remove again.
                            String newId = RepairDuplicateIDs.repairDuplicate(cId);
                            boolean dupeRemoved = ids.remove(newId);
                            // There are not nested duplicates, so this should never fire.
                            if (!dupeRemoved) {
                                logger.info("Failed to remove dupe.");
                                return;
                            } else
                                // Modify the communication ID to the unique version.
                                c.setId(newId);
                        }

                        archiver.addEntry(new ArchivableCommunication(c));
                    }

                    logger.info("Finished path: {}", pathStr);
                } catch (ConcreteException ex) {
                    logger.error("Caught ConcreteException during Concrete mapping.", ex);
                    logger.error("Path: {}", pathStr);
                } catch (IOException e) {
                    logger.error("Error archiving communications.", e);
                    logger.error("Path: {}", localOutPath.toString());
                }
            });

    sw.stop();
    logger.info("Finished.");
    Minutes m = new Duration(sw.getTime()).toStandardMinutes();
    logger.info("Runtime: Approximately {} minutes.", m.getMinutes());
}

From source file:edu.jhu.hlt.concrete.stanford.ConcreteStanfordRunner.java

License:Open Source License

public void run(Path inPath, Path outPath, Analytic<? extends TokenizedCommunication> analytic) {
    LOGGER.debug("Checking input and output directories.");
    try {//  w w  w. ja  v a 2  s  .  c o  m
        prepareInputOutput(inPath, outPath);
    } catch (IOException e) {
        LOGGER.error("Caught IOException when checking input and output directories.", e);
    }

    String lowerOutPathStr = inPath.toString().toLowerCase();
    try {
        sed.disable();

        // Outcomes of outPathStr ending:
        // No valid ending (program exit)
        // Ends with .concrete (first if)
        // Ends with .tar (else, first if)
        // Ends with .tar.gz (else, second if)

        boolean isTarExt = lowerOutPathStr.endsWith(".tar");
        boolean isTarGzExt = lowerOutPathStr.endsWith(".tar.gz") || lowerOutPathStr.endsWith(".tgz");
        boolean isConcreteExt = lowerOutPathStr.endsWith(".concrete") || lowerOutPathStr.endsWith(".comm");

        int nElementsInitPath = inPath.getNameCount();
        Path inputFileName = inPath.getName(nElementsInitPath - 1);

        // If no extention matches, exit.
        if (!isTarExt && !isTarGzExt && !isConcreteExt) {
            LOGGER.error("Input file extension was not '.concrete', '.comm', '.tar', or '.tar.gz'; exiting.");
            System.exit(1);
        } else if (isConcreteExt) {
            // IF .concrete, run single communication.
            LOGGER.info("Annotating single .concrete file at: {}", inPath.toString());
            try (InputStream in = Files.newInputStream(inPath);
                    BufferedInputStream bin = new BufferedInputStream(in, 1024 * 8 * 24);) {
                byte[] inputBytes = IOUtils.toByteArray(bin);
                Communication c = ser.fromBytes(inputBytes);
                WrappedCommunication annotated = analytic.annotate(c);
                Communication ar = annotated.getRoot();
                WritableCommunication wc = new WritableCommunication(ar);
                if (Files.isDirectory(outPath))
                    wc.writeToFile(outPath.resolve(inputFileName), true);
                else
                    wc.writeToFile(outPath, true);
            } catch (AnalyticException e) {
                LOGGER.error("Caught exception when running the analytic.", e);
            }
        } else {

            Path localOutPath;
            if (Files.isDirectory(outPath))
                // if directory, use same extension as input.
                localOutPath = outPath.resolve(inputFileName);
            else
                localOutPath = outPath;

            // Iterate over the archive.
            AutoCloseableIterator<byte[]> iter;
            try (InputStream is = Files.newInputStream(inPath);
                    BufferedInputStream bis = new BufferedInputStream(is, 1024 * 8 * 24);) {

                // open iterator based on file extension
                iter = isTarExt ? new TarArchiveEntryByteIterator(bis) : new TarGzArchiveEntryByteIterator(bis);
                try (OutputStream os = Files.newOutputStream(localOutPath);
                        BufferedOutputStream bos = new BufferedOutputStream(os, 1024 * 8 * 24);) {
                    TarArchiver archiver = isTarExt ? new TarArchiver(bos)
                            : new TarArchiver(new GzipCompressorOutputStream(bos));

                    final StopWatch sw = new StopWatch();
                    sw.start();

                    int docCtr = 0;
                    final AtomicInteger tokenCtr = new AtomicInteger(0);
                    LOGGER.info("Iterating over archive: {}", inPath.toString());
                    while (iter.hasNext()) {
                        Communication n = ser.fromBytes(iter.next());
                        LOGGER.info("Annotating communication: {}", n.getId());
                        try {
                            TokenizedCommunication a = analytic.annotate(n);
                            a.getTokenizations().parallelStream()
                                    .map(tkzToInt -> tkzToInt.getTokenList().getTokenListSize())
                                    .forEach(ct -> tokenCtr.addAndGet(ct));
                            archiver.addEntry(new ArchivableCommunication(a.getRoot()));
                            docCtr++;
                        } catch (AnalyticException | IOException | StringIndexOutOfBoundsException e) {
                            LOGGER.error("Caught exception processing document: " + n.getId(), e);
                        }
                    }

                    try {
                        archiver.close();
                        iter.close();
                    } catch (Exception e) {
                        // unlikely.
                        LOGGER.info("Caught exception closing iterator.", e);
                    }

                    sw.stop();
                    Duration rt = new Duration(sw.getTime());
                    Seconds st = rt.toStandardSeconds();
                    Minutes m = rt.toStandardMinutes();
                    int minutesInt = m.getMinutes();

                    LOGGER.info("Complete.");
                    LOGGER.info("Runtime: approximately {} minutes.", minutesInt);
                    LOGGER.info("Processed {} documents.", docCtr);
                    final int tokens = tokenCtr.get();
                    LOGGER.info("Processed {} tokens.", tokens);
                    if (docCtr > 0 && minutesInt > 0) {
                        final float minutesFloat = minutesInt;
                        float perMin = docCtr / minutesFloat;
                        LOGGER.info("Processed approximately {} documents/minute.", perMin);
                        LOGGER.info("Processed approximately {} tokens/second.",
                                st.getSeconds() / minutesFloat);
                    }
                }
            }
        }
    } catch (IOException | ConcreteException e) {
        LOGGER.error("Caught exception while running the analytic over archive.", e);
    }
}

From source file:edu.jhu.hlt.concrete.stanford.Runner.java

License:Open Source License

/**
 * @param args//w w  w.j a v a 2 s  .c  o m
 */
public static void main(String... args) {
    Thread.setDefaultUncaughtExceptionHandler(new LoggedUncaughtExceptionHandler());

    Runner run = new Runner();
    JCommander jc = new JCommander(run, args);
    jc.setProgramName(Runner.class.getName());
    if (run.help) {
        jc.usage();
        System.exit(0);
    }

    int nDocsSeen = 0;
    int nDocsFailed = 0;
    List<String> exIds = new ArrayList<>();
    boolean haveSeenException = false;

    Path outF = Paths.get(run.outputPath);
    Path inp = Paths.get(run.inputPath);
    LOGGER.info("Input path: {}", inp.toString());
    LOGGER.info("Output folder: {}", outF.toString());

    try {
        new ExistingNonDirectoryFile(inp);
        if (!Files.exists(outF)) {
            LOGGER.info("Creating output directory.");
            Files.createDirectories(outF);
        }

        Path outFile = outF.resolve(run.outputName);
        if (Files.exists(outFile)) {
            if (run.overwrite)
                Files.delete(outFile);
            else {
                LOGGER.info("File exists and overwrite = false. Not continuing.");
                System.exit(1);
            }
        }

        PipelineLanguage lang = PipelineLanguage.getEnumeration(run.lang);
        Analytic<? extends WrappedCommunication> a;
        if (run.isInputTokenized)
            a = new AnnotateTokenizedConcrete(lang);
        else
            a = new AnnotateNonTokenizedConcrete(lang);

        StopWatch sw = new StopWatch();
        sw.start();
        LOGGER.info("Beginning ingest at: {}", new DateTime().toString());
        try (InputStream in = Files.newInputStream(inp);
                OutputStream os = Files.newOutputStream(outFile);
                GzipCompressorOutputStream gout = new GzipCompressorOutputStream(os);
                TarArchiver arch = new TarArchiver(gout);) {
            TarGzArchiveEntryCommunicationIterator iter = new TarGzArchiveEntryCommunicationIterator(in);
            while (iter.hasNext()) {
                Communication c = iter.next();
                nDocsSeen++;
                try {
                    arch.addEntry(new ArchivableCommunication(a.annotate(c).getRoot()));
                } catch (AnalyticException e) {
                    LOGGER.warn("Caught analytic exception on document: " + c.getId());
                    nDocsFailed++;
                    exIds.add(c.getId());
                    haveSeenException = true;
                    if (run.exitOnException)
                        break;
                }
            }
        }

        if (run.exitOnException && haveSeenException)
            System.exit(1);

        sw.stop();
        LOGGER.info("Ingest completed at: {}", new DateTime().toString());
        Duration d = new Duration(sw.getTime());
        Period p = d.toPeriod();
        LOGGER.info("Ingest took {}d{}m{}s.", p.getDays(), p.getMinutes(), p.getSeconds());
        final int seenLessFailed = nDocsSeen - nDocsFailed;
        float ratio = nDocsSeen > 0 ? (float) seenLessFailed / nDocsSeen * 100 : 0;
        LOGGER.info("Converted {}% of documents successfully. [{} / {} total]", ratio, seenLessFailed,
                nDocsSeen);
        if (haveSeenException)
            exIds.forEach(eid -> LOGGER.info("Caught exception on document: {}", eid));
    } catch (IOException | NotFileException e) {
        throw new RuntimeException(e);
    }
}

From source file:gov.usgs.anss.query.EdgeQueryOptions.java

License:Open Source License

/**
 * @param seconds the offset to set/*w w w.j  a v a 2  s. c o  m*/
 */
public void setOffset(double seconds) {
    this.offset = new Duration((long) (seconds * 1000));
}

From source file:griffon.plugins.jodatime.editors.DurationPropertyEditor.java

License:Apache License

protected void setValueInternal(Object value) {
    if (null == value) {
        super.setValueInternal(null);
    } else if (value instanceof CharSequence) {
        handleAsString(String.valueOf(value));
    } else if (value instanceof Duration) {
        super.setValueInternal(value);
    } else if (value instanceof Number) {
        super.setValueInternal(new Duration(((Number) value).longValue()));
    } else {//  w  w  w.  ja  v  a  2  s  . c o  m
        throw illegalValue(value, Duration.class);
    }
}

From source file:griffon.plugins.jodatime.editors.DurationPropertyEditor.java

License:Apache License

private void handleAsString(String str) {
    if (isBlank(str)) {
        super.setValueInternal(null);
        return;//from  www. j a  v a2  s . co m
    }

    try {
        super.setValueInternal(new Duration(Long.parseLong(str)));
        return;
    } catch (NumberFormatException nfe) {
        // ignore
    }

    try {
        super.setValueInternal(Duration.parse(str));
    } catch (IllegalArgumentException e) {
        throw illegalValue(str, Duration.class, e);
    }
}

From source file:griffon.plugins.jodatime.JodatimeExtension.java

License:Apache License

public static Duration toDuration(Number number) {
    return new Duration(abs(number.longValue()));
}

From source file:griffon.plugins.scaffolding.atoms.DurationValue.java

License:Apache License

@Override
public void setValue(Object value) {
    if (value == null || value instanceof Duration) {
        super.setValue(value);
    } else if (value instanceof Number) {
        super.setValue(new Duration(abs(((Number) value).longValue())));
    } else {/*from  ww  w.  jav  a  2s.c  om*/
        throw new IllegalArgumentException("Invalid value " + value);
    }
}

From source file:io.dropwizard.primer.auth.PrimerAuthorizationRegistry.java

License:Apache License

public static void init(PrimerAuthorizationMatrix matrix, Set<String> whiteListUrls,
        PrimerBundleConfiguration configuration, JsonWebTokenParser tokenParser,
        HmacSHA512Verifier tokenVerifier) {
    authList = new HashMap<>();
    whiteList = new ArrayList<>();
    urlPatterns = new ArrayList<>();
    parser = tokenParser;/*  ww w.  java 2 s .  c o  m*/
    verifier = tokenVerifier;
    val urlToAuthMap = new HashMap<String, PrimerAuthorization>();
    val tokenMatch = Pattern.compile("\\{(([^/])+\\})");
    if (matrix != null) {
        if (matrix.getAuthorizations() != null) {
            matrix.getAuthorizations().forEach(auth -> {
                final String pattern = generatePathExpression(auth.getUrl());
                urlPatterns.add(pattern);
                urlToAuthMap.put(pattern, auth);
            });
        }
        if (matrix.getStaticAuthorizations() != null) {
            matrix.getStaticAuthorizations().forEach(auth -> {
                final String pattern = generatePathExpression(auth.getUrl());
                urlPatterns.add(pattern);
                urlToAuthMap.put(pattern, auth);
            });
        }
        if (matrix.getAutoAuthorizations() != null) {
            matrix.getAutoAuthorizations().forEach(auth -> {
                final String pattern = generatePathExpression(auth.getUrl());
                urlPatterns.add(pattern);
                urlToAuthMap.put(pattern, auth);
            });
        }
        Collections.sort(urlPatterns,
                (o1, o2) -> tokenMatch.matcher(o2).groupCount() - tokenMatch.matcher(o1).groupCount());
        Collections.sort(urlPatterns, (o1, o2) -> o2.compareTo(o1));
        urlPatterns.forEach(pattern -> authList.put(pattern, urlToAuthMap.get(pattern)));
    }
    whiteListUrls.forEach(url -> whiteList.add(generatePathExpression(url)));
    Collections.sort(whiteList,
            (o1, o2) -> tokenMatch.matcher(o2).groupCount() - tokenMatch.matcher(o1).groupCount());
    Collections.sort(whiteList, (o1, o2) -> o2.compareTo(o1));
    blacklistCache = CacheBuilder.newBuilder()
            .expireAfterWrite(configuration.getCacheExpiry(), TimeUnit.SECONDS)
            .maximumSize(configuration.getCacheMaxSize()).build(new CacheLoader<String, Optional<Boolean>>() {
                @Override
                public Optional<Boolean> load(String key) throws Exception {
                    return Optional.of(false);
                }
            });
    lruCache = CacheBuilder.newBuilder().expireAfterWrite(configuration.getCacheExpiry(), TimeUnit.SECONDS)
            .maximumSize(configuration.getCacheMaxSize()).build(new CacheLoader<TokenKey, JsonWebToken>() {
                @Override
                public JsonWebToken load(TokenKey key) throws Exception {
                    return verifyToken(key);
                }
            });
    expiryValidator = new ExpiryValidator(new Duration(configuration.getClockSkew()));
}

From source file:io.druid.common.config.ConfigManager.java

License:Apache License

@LifecycleStart
public void start() {
    synchronized (lock) {
        if (started) {
            return;
        }// w  w  w . j  a v  a 2s. c  o  m

        poller = new PollingCallable();
        ScheduledExecutors.scheduleWithFixedDelay(exec, new Duration(0),
                config.get().getPollDuration().toStandardDuration(), poller);

        started = true;
    }
}