Example usage for java.time LocalDateTime isAfter

List of usage examples for java.time LocalDateTime isAfter

Introduction

On this page you can find usage examples for java.time LocalDateTime.isAfter.

Prototype

@Override 
public boolean isAfter(ChronoLocalDateTime<?> other) 

Document

Checks if this date-time is after the specified date-time.
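
Before the examples collected from real projects, here is a minimal, self-contained sketch of the method. The class name and timestamp values are illustrative only. Note that the parameter type is ChronoLocalDateTime<?>, so the comparison is made on the time-line, and isAfter is strict: it returns false when the two date-times are equal.

import java.time.LocalDateTime;

public class IsAfterDemo {
    public static void main(String[] args) {
        // Illustrative values only.
        LocalDateTime deadline = LocalDateTime.of(2024, 6, 1, 12, 0);
        LocalDateTime submission = LocalDateTime.of(2024, 6, 1, 12, 30);

        // true: the submission is strictly later than the deadline.
        System.out.println(submission.isAfter(deadline));

        // false: an equal date-time is not considered "after".
        System.out.println(deadline.isAfter(LocalDateTime.of(2024, 6, 1, 12, 0)));
    }
}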

Usage

From source file: com.otway.picasasync.syncutil.SyncManager.java

private List<AlbumSync> getRemoteDownloadList(List<AlbumEntry> remoteAlbums, final File rootFolder,
        LocalDateTime oldestDate) throws ServiceException, IOException {
    HashSet<String> uniqueNames = new HashSet<String>();
    List<AlbumSync> result = new ArrayList<AlbumSync>();

    // If this is false, we only care about instant upload albums.
    boolean nonInstantUploadAlbums = settings.getDownloadChanged() || settings.getUploadChanged()
            || settings.getDownloadNew() || settings.getUploadNew();

    for (AlbumEntry album : remoteAlbums) {

        String title = album.getTitle().getPlainText();
        boolean isInstantUploadType = PicasawebClient.isInstantUpload(album);

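        // LocalDateTime.isAfter: skip the album if the cutoff date is later than the album's last-update time.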
        if (oldestDate.isAfter(getTimeFromMS(album.getUpdated().getValue()))) {
            log.debug("Album update date (" + album.getUpdated() + ") too old. Skipping " + title);
            continue;
        }

        if (!settings.getAutoBackupDownload() && isInstantUploadType) {
            log.info("Skipping Auto-Backup album: " + title);
            continue;
        }

        if (settings.getExcludeDropBox() && title.equals("Drop Box")) {
            log.info("Skipping DropBox album.");
            continue;
        }

        String suffix = "";

        if (uniqueNames.contains(title)) {
            log.info(" Duplicate online album: " + title + " (" + album.getName() + ") - skipping...");
            continue;
        }

        uniqueNames.add(title);

        // Might need to convert some auto-backup style folder names, which have slashes
        File albumFolder = PicasawebClient.getFolderNameForAlbum(rootFolder, album);

        if (!isInstantUploadType && !suffix.isEmpty()) {

            // If it's not AutoBackup, add the suffix to differentiate duplicate titles
            albumFolder = new File(albumFolder.getParent(), albumFolder.getName() + suffix);
        }

        if (!isInstantUploadType && !nonInstantUploadAlbums)
            continue;

        result.add(new AlbumSync(album, albumFolder, this, settings));
    }

    return result;
}

From source file: com.streamsets.pipeline.stage.origin.jdbc.cdc.oracle.OracleCDCSource.java

@Override
public List<ConfigIssue> init() {
    List<ConfigIssue> issues = super.init();
    errorRecordHandler = new DefaultErrorRecordHandler(getContext());
    useLocalBuffering = !getContext().isPreview() && configBean.bufferLocally;
    if (!hikariConfigBean.driverClassName.isEmpty()) {
        try {
            Class.forName(hikariConfigBean.driverClassName);
        } catch (ClassNotFoundException e) {
            LOG.error("Hikari Driver class not found.", e);
            issues.add(getContext().createConfigIssue(Groups.LEGACY.name(), DRIVER_CLASSNAME,
                    JdbcErrors.JDBC_28, e.toString()));
        }
    }
    issues = hikariConfigBean.validateConfigs(getContext(), issues);
    if (connection == null) { // For tests, we set a mock connection
        try {
            dataSource = jdbcUtil.createDataSourceForRead(hikariConfigBean);
            connection = dataSource.getConnection();
            connection.setAutoCommit(false);
        } catch (StageException | SQLException e) {
            LOG.error("Error while connecting to DB", e);
            issues.add(
                    getContext().createConfigIssue(Groups.JDBC.name(), CONNECTION_STR, JDBC_00, e.toString()));
            return issues;
        }
    }

    recordQueue = new LinkedBlockingQueue<>(2 * configBean.baseConfigBean.maxBatchSize);
    String container = configBean.pdb;

    List<SchemaAndTable> schemasAndTables;

    try {
        initializeStatements();
        alterSession();
    } catch (SQLException ex) {
        LOG.error("Error while creating statement", ex);
        issues.add(getContext().createConfigIssue(Groups.JDBC.name(), CONNECTION_STR, JDBC_00,
                hikariConfigBean.getConnectionString()));
    }
    zoneId = ZoneId.of(configBean.dbTimeZone);
    dateTimeColumnHandler = new DateTimeColumnHandler(zoneId);
    String commitScnField;
    BigDecimal scn = null;
    try {
        scn = getEndingSCN();
        switch (configBean.startValue) {
        case SCN:
            if (new BigDecimal(configBean.startSCN).compareTo(scn) > 0) {
                issues.add(getContext().createConfigIssue(CDC.name(), "oracleCDCConfigBean.startSCN", JDBC_47,
                        scn.toPlainString()));
            }
            break;
        case LATEST:
            // If LATEST is used, use now() as the startDate and proceed as if a startDate was specified
            configBean.startDate = nowAtDBTz().format(dateTimeColumnHandler.dateFormatter);
            // fall-through
        case DATE:
            try {
                LocalDateTime startDate = dateTimeColumnHandler.getDate(configBean.startDate);
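                // LocalDateTime.isAfter: reject a configured start date that lies in the future relative to the database clock.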
                if (startDate.isAfter(nowAtDBTz())) {
                    issues.add(getContext().createConfigIssue(CDC.name(), "oracleCDCConfigBean.startDate",
                            JDBC_48));
                }
            } catch (DateTimeParseException ex) {
                LOG.error("Invalid date", ex);
                issues.add(
                        getContext().createConfigIssue(CDC.name(), "oracleCDCConfigBean.startDate", JDBC_49));
            }
            break;
        default:
            throw new IllegalStateException("Unknown start value!");
        }
    } catch (SQLException ex) {
        LOG.error("Error while getting SCN", ex);
        issues.add(getContext().createConfigIssue(CREDENTIALS.name(), USERNAME, JDBC_42));
    }

    try (Statement reusedStatement = connection.createStatement()) {
        int majorVersion = getDBVersion(issues);
        // If version is 12+, then the check for table presence must be done in an alternate container!
        if (majorVersion == -1) {
            return issues;
        }
        if (majorVersion >= 12) {
            if (!StringUtils.isEmpty(container)) {
                String switchToPdb = "ALTER SESSION SET CONTAINER = " + configBean.pdb;
                try {
                    reusedStatement.execute(switchToPdb);
                } catch (SQLException ex) {
                    LOG.error("Error while switching to container: " + container, ex);
                    issues.add(getContext().createConfigIssue(Groups.CREDENTIALS.name(), USERNAME, JDBC_40,
                            container));
                    return issues;
                }
                containerized = true;
            }
        }

        schemasAndTables = new ArrayList<>();
        for (SchemaTableConfigBean tables : configBean.baseConfigBean.schemaTableConfigs) {

            tables.schema = configBean.baseConfigBean.caseSensitive ? tables.schema
                    : tables.schema.toUpperCase();
            tables.table = configBean.baseConfigBean.caseSensitive ? tables.table : tables.table.toUpperCase();
            if (tables.excludePattern != null) {
                tables.excludePattern = configBean.baseConfigBean.caseSensitive ? tables.excludePattern
                        : tables.excludePattern.toUpperCase();
            }
            Pattern p = StringUtils.isEmpty(tables.excludePattern) ? null
                    : Pattern.compile(tables.excludePattern);

            try (ResultSet rs = jdbcUtil.getTableAndViewMetadata(connection, tables.schema, tables.table)) {
                while (rs.next()) {
                    String schemaName = rs.getString(TABLE_METADATA_TABLE_SCHEMA_CONSTANT);
                    String tableName = rs.getString(TABLE_METADATA_TABLE_NAME_CONSTANT);
                    if (p == null || !p.matcher(tableName).matches()) {
                        schemaName = schemaName.trim();
                        tableName = tableName.trim();
                        schemasAndTables.add(new SchemaAndTable(schemaName, tableName));
                    }
                }
            }
        }

        validateTablePresence(reusedStatement, schemasAndTables, issues);
        if (!issues.isEmpty()) {
            return issues;
        }
        for (SchemaAndTable schemaAndTable : schemasAndTables) {
            try {
                tableSchemas.put(schemaAndTable, getTableSchema(schemaAndTable));
                if (scn != null) {
                    tableSchemaLastUpdate.put(schemaAndTable, scn);
                }
            } catch (SQLException ex) {
                LOG.error("Error while switching to container: " + container, ex);
                issues.add(getContext().createConfigIssue(Groups.CREDENTIALS.name(), USERNAME, JDBC_50));
            }
        }
        container = CDB_ROOT;
        if (majorVersion >= 12) {
            try {
                switchContainer.execute();
                LOG.info("Switched to CDB$ROOT to start LogMiner.");
            } catch (SQLException ex) {
                // Fatal only if we switched to a PDB earlier
                if (containerized) {
                    LOG.error("Error while switching to container: " + container, ex);
                    issues.add(getContext().createConfigIssue(Groups.CREDENTIALS.name(), USERNAME, JDBC_40,
                            container));
                    return issues;
                }
                // Log it anyway
                LOG.info("Switching containers failed, ignoring since there was no PDB switch", ex);
            }
        }
        commitScnField = majorVersion >= 11 ? "COMMIT_SCN" : "CSCN";
    } catch (SQLException ex) {
        LOG.error("Error while creating statement", ex);
        issues.add(getContext().createConfigIssue(Groups.JDBC.name(), CONNECTION_STR, JDBC_00,
                hikariConfigBean.getConnectionString()));
        return issues;
    }

    final String ddlTracking = shouldTrackDDL ? " + DBMS_LOGMNR.DDL_DICT_TRACKING" : "";

    final String readCommitted = useLocalBuffering ? "" : "+ DBMS_LOGMNR.COMMITTED_DATA_ONLY";

    this.logMinerProcedure = "BEGIN" + " DBMS_LOGMNR.START_LOGMNR(" + " {}," + " {},"
            + " OPTIONS => DBMS_LOGMNR." + configBean.dictionary.name()
            + "          + DBMS_LOGMNR.CONTINUOUS_MINE" + readCommitted
            + "          + DBMS_LOGMNR.NO_SQL_DELIMITER" + ddlTracking + ");" + " END;";

    final String base = "SELECT SCN, USERNAME, OPERATION_CODE, TIMESTAMP, SQL_REDO, TABLE_NAME, "
            + commitScnField
            + ", SEQUENCE#, CSF, XIDUSN, XIDSLT, XIDSQN, RS_ID, SSN, SEG_OWNER, ROLLBACK, ROW_ID "
            + " FROM V$LOGMNR_CONTENTS" + " WHERE ";

    final String tableCondition = getListOfSchemasAndTables(schemasAndTables);

    final String commitRollbackCondition = Utils.format("OPERATION_CODE = {} OR OPERATION_CODE = {}",
            COMMIT_CODE, ROLLBACK_CODE);

    final String operationsCondition = "OPERATION_CODE IN (" + getSupportedOperations() + ")";

    final String restartNonBufferCondition = Utils.format("((" + commitScnField + " = ? AND SEQUENCE# > ?) OR "
            + commitScnField + "  > ?)" + (shouldTrackDDL ? " OR (OPERATION_CODE = {} AND SCN > ?)" : ""),
            DDL_CODE);

    if (useLocalBuffering) {
        selectString = String.format("%s ((%s AND (%s)) OR (%s))", base, tableCondition, operationsCondition,
                commitRollbackCondition);
    } else {
        selectString = base + " (" + tableCondition + " AND (" + operationsCondition + "))" + "AND ("
                + restartNonBufferCondition + ")";
    }

    try {
        initializeLogMnrStatements();
    } catch (SQLException ex) {
        LOG.error("Error while creating statement", ex);
        issues.add(getContext().createConfigIssue(Groups.JDBC.name(), CONNECTION_STR, JDBC_00,
                hikariConfigBean.getConnectionString()));
    }

    if (configBean.dictionary == DictionaryValues.DICT_FROM_REDO_LOGS) {
        try {
            startLogMnrForRedoDict();
        } catch (Exception ex) {
            LOG.warn("Error while attempting to start LogMiner to load dictionary", ex);
            issues.add(getContext().createConfigIssue(Groups.CDC.name(), "oracleCDCConfigBean.dictionary",
                    JDBC_44, ex));
        }
    }

    if (useLocalBuffering && configBean.bufferLocation == BufferingValues.ON_DISK) {
        File tmpDir = new File(System.getProperty("java.io.tmpdir"));
        String relativePath = getContext().getSdcId() + "/" + getContext().getPipelineId() + "/"
                + getContext().getStageInfo().getInstanceName();
        this.txnBufferLocation = new File(tmpDir, relativePath);

        try {
            if (txnBufferLocation.exists()) {
                FileUtils.deleteDirectory(txnBufferLocation);
                LOG.info("Deleted " + txnBufferLocation.toString());
            }
            Files.createDirectories(txnBufferLocation.toPath());
            LOG.info("Created " + txnBufferLocation.toString());
        } catch (IOException ex) {
            Throwables.propagate(ex);
        }
    }

    if (configBean.bufferLocally) {
        if (configBean.parseQuery) {
            parsingExecutor = Executors.newFixedThreadPool(configBean.parseThreadPoolSize,
                    new ThreadFactoryBuilder().setNameFormat("Oracle CDC Origin Parse Thread - %d").build());
        } else {
            parsingExecutor = Executors.newSingleThreadExecutor(
                    new ThreadFactoryBuilder().setNameFormat("Oracle CDC Origin Parse Thread - %d").build());
        }
    }

    if (configBean.txnWindow >= configBean.logminerWindow) {
        issues.add(getContext().createConfigIssue(Groups.CDC.name(), "oracleCDCConfigBean.logminerWindow",
                JDBC_81));
    }
    version = useLocalBuffering ? VERSION_UNCOMMITTED : VERSION_STR;
    delay = getContext().createGauge("Read Lag (seconds)");
    return issues;
}