Example usage for java.util NavigableSet removeAll

List of usage examples for java.util NavigableSet removeAll

Introduction

On this page you can find example usages of java.util NavigableSet removeAll.

Prototype

boolean removeAll(Collection<?> c);

Document

Removes from this set all of its elements that are contained in the specified collection (optional operation).
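
The following is a minimal, self-contained sketch (not taken from the sources below) showing the basic behavior of removeAll on a NavigableSet: elements contained in the argument collection are removed, elements not present are simply ignored, and the return value tells whether the set changed.

import java.util.Arrays;
import java.util.List;
import java.util.NavigableSet;
import java.util.TreeSet;

public class RemoveAllSketch {
    public static void main(String[] args) {
        NavigableSet<String> set = new TreeSet<>(Arrays.asList("a", "b", "c", "d"));
        List<String> toRemove = Arrays.asList("b", "d", "x");

        // Returns true because the set changed; "x" is not in the set and is ignored.
        boolean changed = set.removeAll(toRemove);

        System.out.println(changed); // true
        System.out.println(set);     // [a, c]
    }
}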

Usage

From source file:eu.ggnet.dwoss.report.assist.ReportUtil.java

/**
 * Returns all ReportLines that have no impact on the result and are of purely informational character.
 * <p>
 * The collection must not contain null values.
 * <p>
 * @param lines the lines to filter
 * @return all ReportLines that have no impact on the result and are of purely informational character.
 */
public static NavigableSet<ReportLine> filterReportInfo(Collection<ReportLine> lines) {
    NavigableSet<ReportLine> result = new TreeSet<>(lines);
    result.removeAll(filterInvoiced(lines));
    result.removeAll(filterRepayed(lines));
    return result;
}
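
filterReportInfo uses removeAll as a set difference: it starts with all lines and subtracts the invoiced lines and the repaid lines (filterInvoiced, filterRepayed). The following is a minimal, hypothetical sketch of the same idiom with plain integers instead of ReportLine; the class name and the even/odd predicate are made up for illustration.

import java.util.Arrays;
import java.util.NavigableSet;
import java.util.TreeSet;

public class PartitionSketch {
    public static void main(String[] args) {
        NavigableSet<Integer> all = new TreeSet<>(Arrays.asList(1, 2, 3, 4, 5, 6));

        // First bucket: elements matching some predicate (here: even numbers).
        NavigableSet<Integer> selected = new TreeSet<>();
        for (Integer i : all) {
            if (i % 2 == 0) {
                selected.add(i);
            }
        }

        // Second bucket: everything else, computed as a set difference.
        NavigableSet<Integer> rest = new TreeSet<>(all);
        rest.removeAll(selected);

        System.out.println(selected); // [2, 4, 6]
        System.out.println(rest);     // [1, 3, 5]
    }
}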

From source file:eu.ggnet.dwoss.report.assist.ReportUtil.java

/**
 * Removes all lines that only represent active info (open complaints).
 * <ol>
 * <li>Collect all invoice-only positions -> Report</li>
 * <li>Collect all repayment positions -> Report</li>
 * <li>Collect all complaint positions related to those repayment positions -> Report</li>
 * <li>Collect all complaints that are DISCHARGED -> Report</li>
 * <li>Everything left over should be (open) complaints -> Active Info</li>
 * </ol>
 * <p>
 * The collection must not contain null values.
 * <p>
 * @param allLines   all lines.
 * @param reportType the report type
 * @return the partition into reportable lines and active info lines
 */
public static PrepareReportPartition partition(Collection<ReportLine> allLines, TradeName reportType) {
    L.debug("filter {}", allLines);
    NavigableSet<ReportLine> reportAble = new TreeSet<>();
    for (ReportLine line : allLines) {
        L.debug("filter processing {}", line.toSimple());
        if (!(line.getDocumentType() == DocumentType.ANNULATION_INVOICE
                || line.getDocumentType() == DocumentType.CREDIT_MEMO
                || line.getDocumentType() == DocumentType.CAPITAL_ASSET
                || (line.getDocumentType() == DocumentType.COMPLAINT
                        && line.getWorkflowStatus() == WorkflowStatus.DISCHARGED)
                || (line.getDocumentType() == DocumentType.INVOICE && line.hasNoRepayments())
                        && line.hasNoOpenComplaints()))
            continue;
        L.debug("filter processing, add to reportAble {}", line.toSimple());
        reportAble.add(line);
        Date tomorrow = DateUtils.addDays(line.getReportingDate(), 1);
        for (ReportLine ref : line.getRefrences()) {
            if (ref.getDocumentType() == DocumentType.COMPLAINT && !ref.isInReport(reportType)) {
                L.debug("filter processing referencing complaints, add to reportAble {}", ref.toSimple());
                reportAble.add(ref);
            } else if (ref.getDocumentType() == DocumentType.INVOICE && !ref.isInReport(reportType)
                    && ref.getReportingDate().before(tomorrow)) {
                L.debug("filter processing referencing invoices, add to reportAble {}", ref.toSimple());
                reportAble.add(ref);
            }
        }
    }
    NavigableSet<ReportLine> activeInfo = new TreeSet<>(allLines);
    activeInfo.removeAll(reportAble);
    return new PrepareReportPartition(reportAble, activeInfo);
}
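
Note how partition applies the same complement idiom on a larger scale: activeInfo starts as a copy of allLines, and removeAll(reportAble) strips out every line that was selected, directly or via a reference, into reportAble, so no line ends up in both halves of the PrepareReportPartition.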

From source file:eu.ggnet.dwoss.report.entity.Report.java

/**
 * Returns all ReportLines that have no impact on the result and are of purely informational character.
 * <p>
 * @return all ReportLines that have no impact on the result and are of purely informational character.
 */
public NavigableSet<ReportLine> filterInfos() {
    NavigableSet<ReportLine> result = new TreeSet<>(lines);
    result.removeAll(filterInvoiced());
    result.removeAll(filterRepayed());
    return result;
}

From source file:eu.ggnet.dwoss.report.entity.Report.java

/**
 * Returns all lines of the report for category Invoiced, split into lines with mfgDate less than one year before the start of the report and the rest.
 * This consists of:
 * <ul>
 * <li>Positions of type INVOICE with no references</li>
 * <li>Positions of type UNIT_ANNEX in DocumentType CREDIT_MEMO/ANNULATION_INVOICE with a referencing invoice in the same report</li>
 * </ul>
 * <p>
 * @return all lines of the report for category Invoiced.
 */
public YearSplit filterInvoicedSplit() {
    NavigableSet<ReportLine> pastSplit = filterInvoiced();
    NavigableSet<ReportLine> preSplit = new TreeSet<>();
    Date splitter = DateUtils.addYears(startingDate, -1);
    for (ReportLine line : pastSplit) {
        if (splitter.before(line.getMfgDate()))
            preSplit.add(line);
    }
    pastSplit.removeAll(preSplit);
    return new YearSplit(startingDate, preSplit, pastSplit);
}
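
filterInvoicedSplit mutates the set returned by filterInvoiced: every line newer than the split date is first collected into preSplit and then removed from pastSplit, so the two sets of the YearSplit are disjoint. The following is a minimal, hypothetical sketch of the same split pattern using plain LocalDate values instead of ReportLine.

import java.time.LocalDate;
import java.util.NavigableSet;
import java.util.TreeSet;

public class YearSplitSketch {
    public static void main(String[] args) {
        LocalDate startingDate = LocalDate.of(2024, 1, 1);
        LocalDate splitter = startingDate.minusYears(1);

        NavigableSet<LocalDate> pastSplit = new TreeSet<>();
        pastSplit.add(LocalDate.of(2022, 6, 1));   // older than one year
        pastSplit.add(LocalDate.of(2023, 6, 1));   // within one year
        pastSplit.add(LocalDate.of(2023, 11, 15)); // within one year

        // Collect every date newer than the split point ...
        NavigableSet<LocalDate> preSplit = new TreeSet<>();
        for (LocalDate d : pastSplit) {
            if (splitter.isBefore(d)) {
                preSplit.add(d);
            }
        }
        // ... and remove them from the original set, leaving only the older dates.
        pastSplit.removeAll(preSplit);

        System.out.println(preSplit);  // [2023-06-01, 2023-11-15]
        System.out.println(pastSplit); // [2022-06-01]
    }
}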

From source file:com.enitalk.configs.DateCache.java

@Bean(name = "skipCache")
public LoadingCache<String, ConcurrentSkipListSet<DateTime>> datesMap() {
    CacheBuilder<Object, Object> ccc = CacheBuilder.newBuilder();
    ccc.expireAfterWrite(2, TimeUnit.MINUTES);

    LoadingCache<String, ConcurrentSkipListSet<DateTime>> cache = ccc
            .build(new CacheLoader<String, ConcurrentSkipListSet<DateTime>>() {

                @Override
                public ConcurrentSkipListSet<DateTime> load(String key) throws Exception {
                    try {
                        HashMap teachers = mongo.findOne(Query.query(Criteria.where("i").is(key)),
                                HashMap.class, "teachers");
                        ObjectNode teacherJson = jackson.convertValue(teachers, ObjectNode.class);
                        String timeZone = teacherJson.at("/calendar/timeZone").asText();

                        NavigableSet<DateTime> set = days(teacherJson.path("schedule"), timeZone, teacherJson);

                        DateTimeZone dzz = DateTimeZone.forID(timeZone);
                        DateTimeFormatter df = ISODateTimeFormat.dateTimeNoMillis().withZone(dzz);

                        byte[] events = calendar.busyEvents(jackson.createObjectNode().put("id", key));
                        JsonNode evs = jackson.readTree(events);
                        Iterator<JsonNode> its = evs.iterator();
                        TreeSet<DateTime> dates = new TreeSet<>();
                        while (its.hasNext()) {
                            String date = its.next().asText();
                            DateTime av = df.parseDateTime(date).toDateTime(DateTimeZone.UTC);
                            dates.add(av);
                        }

                        set.removeAll(dates);

                        logger.info("Dates for i {} {}", key, set);

                        return new ConcurrentSkipListSet<>(set);

                    } catch (Exception e) {
                        logger.error(ExceptionUtils.getFullStackTrace(e));
                    }
                    return null;
                }

            });

    return cache;
}
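
In the cache loader above, removeAll subtracts the busy calendar slots from the generated availability set before it is wrapped in a ConcurrentSkipListSet. The following is a minimal, hypothetical sketch of the same idea using java.time instead of Joda-Time.

import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.NavigableSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentSkipListSet;

public class SlotFilterSketch {
    public static void main(String[] args) {
        Instant base = Instant.parse("2024-01-01T10:00:00Z");

        // Free slots, one per hour.
        NavigableSet<Instant> free = new ConcurrentSkipListSet<>();
        for (int h = 0; h < 4; h++) {
            free.add(base.plus(h, ChronoUnit.HOURS));
        }

        // Slots already taken by calendar events.
        NavigableSet<Instant> busy = new TreeSet<>();
        busy.add(base.plus(1, ChronoUnit.HOURS));
        busy.add(base.plus(3, ChronoUnit.HOURS));

        // Subtract the busy slots from the free slots.
        free.removeAll(busy);

        System.out.println(free); // [2024-01-01T10:00:00Z, 2024-01-01T12:00:00Z]
    }
}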

From source file:org.apache.hadoop.hbase.rsgroup.RSGroupInfoManagerImpl.java

private synchronized void refresh(boolean forceOnline) throws IOException {
    List<RSGroupInfo> groupList = new LinkedList<RSGroupInfo>();

    // overwrite anything read from zk, group table is source of truth
    // if online read from GROUP table
    if (forceOnline || isOnline()) {
        LOG.debug("Refreshing in Online mode.");
        if (rsGroupTable == null) {
            rsGroupTable = conn.getTable(RSGROUP_TABLE_NAME);
        }
        groupList.addAll(rsGroupSerDe.retrieveGroupList(rsGroupTable));
    } else {
        LOG.debug("Refershing in Offline mode.");
        String groupBasePath = ZKUtil.joinZNode(watcher.baseZNode, rsGroupZNode);
        groupList.addAll(rsGroupSerDe.retrieveGroupList(watcher, groupBasePath));
    }

    // refresh default group, prune
    NavigableSet<TableName> orphanTables = new TreeSet<TableName>();
    for (String entry : master.getTableDescriptors().getAll().keySet()) {
        orphanTables.add(TableName.valueOf(entry));
    }

    List<TableName> specialTables;
    if (!master.isInitialized()) {
        specialTables = new ArrayList<TableName>();
        specialTables.add(AccessControlLists.ACL_TABLE_NAME);
        specialTables.add(TableName.META_TABLE_NAME);
        specialTables.add(TableName.NAMESPACE_TABLE_NAME);
        specialTables.add(RSGROUP_TABLE_NAME);
    } else {
        specialTables = master.listTableNamesByNamespace(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME_STR);
    }

    for (TableName table : specialTables) {
        orphanTables.add(table);
    }
    for (RSGroupInfo group : groupList) {
        if (!group.getName().equals(RSGroupInfo.DEFAULT_GROUP)) {
            orphanTables.removeAll(group.getTables());
        }
    }

    // This is added to the last of the list
    // so it overwrites the default group loaded
    // from region group table or zk
    groupList.add(
            new RSGroupInfo(RSGroupInfo.DEFAULT_GROUP, Sets.newHashSet(getDefaultServers()), orphanTables));

    // populate the data
    HashMap<String, RSGroupInfo> newGroupMap = Maps.newHashMap();
    HashMap<TableName, String> newTableMap = Maps.newHashMap();
    for (RSGroupInfo group : groupList) {
        newGroupMap.put(group.getName(), group);
        for (TableName table : group.getTables()) {
            newTableMap.put(table, group.getName());
        }
    }
    rsGroupMap = Collections.unmodifiableMap(newGroupMap);
    tableMap = Collections.unmodifiableMap(newTableMap);

    prevRSGroups.clear();
    prevRSGroups.addAll(rsGroupMap.keySet());
}
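
In this refresh, orphanTables starts with every known table (plus the special system tables), and removeAll prunes the tables already claimed by a named group; what remains is exactly the set of tables that falls back to the default RSGroup appended at the end of groupList.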