Example usage for java.util.function.BiConsumer

List of usage examples for java.util.function.BiConsumer

Introduction

On this page you can find example usages of java.util.function.BiConsumer, collected from open-source projects.

Prototype

BiConsumer<T, U> — a functional interface whose single abstract method is void accept(T t, U u).
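
Because BiConsumer<T, U> is a functional interface, the projects below either instantiate it as an anonymous inner class or hand it to methods such as Map.forEach. A minimal stand-alone sketch (not taken from any of the projects listed here) showing the anonymous-class form next to the equivalent lambda and andThen chaining:

import java.util.HashMap;
import java.util.Map;
import java.util.function.BiConsumer;

public class BiConsumerSketch {
    public static void main(String[] args) {
        Map<String, Integer> scores = new HashMap<>();
        scores.put("a", 1);
        scores.put("b", 2);

        // Anonymous-class form, as used in most of the examples below.
        BiConsumer<String, Integer> printer = new BiConsumer<String, Integer>() {
            @Override
            public void accept(String key, Integer value) {
                System.out.println(key + "=" + value);
            }
        };

        // Equivalent lambda, chained with andThen to run a second action.
        BiConsumer<String, Integer> chained =
                printer.andThen((key, value) -> System.out.println("seen " + key));

        scores.forEach(chained);
    }
}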

Usage

From source file: com.github.totyumengr.minicubes.cluster.BootTimeSeriesMiniCubeController.java

@RequestMapping(value = "/distinct", method = { RequestMethod.POST, RequestMethod.GET })
public @ResponseBody Map<Integer, Set<Integer>> distinct(@NotBlank @RequestParam String indName,
        @NotBlank @RequestParam(required = false) Boolean isDim,
        @RequestParam(required = false) String filterDims, @RequestParam String groupbyDim,
        @NotBlank @RequestParam String... timeSeries) throws Throwable {

    LOGGER.info("Try to distinct {} on {} with filter {}.", indName, ObjectUtils.getDisplayString(timeSeries),
            filterDims);
    long timing = System.currentTimeMillis();
    Map<String, List<Integer>> filter = (filterDims == null || "".equals(filterDims)) ? null
            : objectMapper.readValue(filterDims, new TypeReference<Map<String, List<Integer>>>() {
            });
    Map<Integer, RoaringBitmap> distinct = manager.aggs(timeSeries).distinct(indName,
            isDim == null ? true : isDim, groupbyDim, filter);
    LOGGER.info("Success to distinct {} on {} result size is {} using {}ms.", indName, timeSeries,
            distinct.size(), System.currentTimeMillis() - timing);
    LOGGER.debug("Success to distinct {} on {} result is {}.", indName, timeSeries, distinct);

    Map<Integer, Set<Integer>> result = new HashMap<Integer, Set<Integer>>();
    distinct.forEach(new BiConsumer<Integer, RoaringBitmap>() {
        @Override
        public void accept(Integer t, RoaringBitmap u) {
            result.put(t, Arrays.stream(u.toArray()).collect(HashSet<Integer>::new, Set::add, (l, r) -> {
            }));
        }
    });

    return result;
}
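
The anonymous BiConsumer above can also be written as a lambda. A minimal stand-alone sketch of the same conversion, substituting a plain int[] per key for the RoaringBitmap values so it runs without the library:

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class DistinctResultSketch {
    public static void main(String[] args) {
        // Stand-in for Map<Integer, RoaringBitmap>: plain int arrays per key.
        Map<Integer, int[]> distinct = new HashMap<>();
        distinct.put(1, new int[] { 10, 20, 30 });

        Map<Integer, Set<Integer>> result = new HashMap<>();
        // Same shape as the anonymous BiConsumer above, written as a lambda.
        distinct.forEach((key, values) -> result.put(key,
                Arrays.stream(values).collect(HashSet<Integer>::new, Set::add, Set::addAll)));

        System.out.println(result); // e.g. {1=[20, 10, 30]}
    }
}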

From source file: com.qwazr.scheduler.SchedulerManager.java

private void buildSchedulerMap() throws SchedulerException {
    synchronized (globalScheduler) {
        globalScheduler.clear();
    }
    final Map<String, SchedulerDefinition> map = new HashMap<>();
    schedulerFileMap.forEach((file, schedulerDefMap) -> map.putAll(schedulerDefMap));
    final List<String> removeKeys = new ArrayList<>();

    // Remove the no more existing jobs status
    statusMapLock.r.lock();
    try {
        schedulerStatusMap.forEach(new BiConsumer<String, List<ScriptRunStatus>>() {
            @Override
            public void accept(String name, List<ScriptRunStatus> scriptRunStatuses) {
                if (!map.containsKey(name))
                    removeKeys.add(name);
            }
        });
        removeKeys.forEach((name) -> schedulerStatusMap.remove(name));
    } finally {
        statusMapLock.r.unlock();
    }

    // Set the volatile map
    schedulerMap = map;

    // Reschedule the jobs
    schedulerMap.forEach(new BiConsumer<String, SchedulerDefinition>() {
        @Override
        public void accept(String name, SchedulerDefinition schedulerDefinition) {
            try {
                checkSchedulerCron(name, schedulerDefinition);
            } catch (SchedulerException e) {
                if (logger.isErrorEnabled())
                    logger.error("Error on scheduler " + name + ": " + e.getMessage(), e);
            }
        }
    });
}
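
The collect-then-remove step above is the usual way to drop entries based on a forEach pass: removing from the map inside the BiConsumer would typically throw ConcurrentModificationException on a plain HashMap, so the keys are gathered first and removed once iteration has finished. A minimal stand-alone sketch of that pattern with ordinary types:

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class RemoveAfterForEachSketch {
    public static void main(String[] args) {
        Map<String, String> statusMap = new HashMap<>();
        statusMap.put("keep", "ok");
        statusMap.put("stale", "old");

        Map<String, String> current = Collections.singletonMap("keep", "ok");

        // 1) Collect the keys to drop inside the BiConsumer...
        List<String> removeKeys = new ArrayList<>();
        statusMap.forEach((name, value) -> {
            if (!current.containsKey(name)) {
                removeKeys.add(name);
            }
        });

        // 2) ...then remove them after iteration has finished.
        removeKeys.forEach(statusMap::remove);

        System.out.println(statusMap); // {keep=ok}
    }
}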

From source file: gov.va.oia.terminology.converters.sharedUtils.propertyTypes.Property.java

/**
 * This is called just before a metadata concept is written when the typical loadMetaDataItems(...) sequence is used in the eConceptUtility.
 *
 * The created concept will be passed to any registered listeners before it is written.
 * @param concept
 */
public BiConsumer<TtkConceptChronicle, EConceptUtility> getCallback() {
    return new BiConsumer<TtkConceptChronicle, EConceptUtility>() {
        @Override
        public void accept(TtkConceptChronicle concept, EConceptUtility utility) {
            try {
                if (Property.this.getPropertyType().createAsDynamicRefex()) {
                    TtkUtils.configureConceptAsDynamicRefex(concept,
                            (StringUtils.isNotEmpty(Property.this.getSourcePropertyDefinition())
                                    ? Property.this.getSourcePropertyDefinition()
                                    : "Dynamic Sememe"),
                            Property.this.getDataColumnsForDynamicRefex(), null, null,
                            ((rev) -> utility.setRevisionAttributes(rev, Status.ACTIVE,
                                    concept.getConceptAttributes().getTime())));

                    utility.registerDynamicSememeColumnInfo(Property.this.getUUID(),
                            Property.this.getDataColumnsForDynamicRefex());

                    if (Property.this.getDataColumnsForDynamicRefex() != null
                            && Property.this.getDataColumnsForDynamicRefex().length > 0) {
                        if (Property.this.getDataColumnsForDynamicRefex() != null) {
                            Integer[] temp = new Integer[Property.this.getDataColumnsForDynamicRefex().length];
                            for (int i = 0; i < temp.length; i++) {
                                temp[i] = i;
                            }

                            //Not really the right place to put this sememe... but it will get moved appropriately when loaded in ochre.
                            concept.getConceptAttributes().getAnnotationsDynamic()
                                    .add(TtkUtils.configureDynamicRefexIndexes(Property.this.getUUID(), temp,
                                            (rev -> utility.setRevisionAttributes(rev, Status.ACTIVE,
                                                    concept.getConceptAttributes().getTime()))));
                        }
                    }
                }

                if (Property.this instanceof PropertyAssociation) {
                    PropertyAssociation item = (PropertyAssociation) Property.this;
                    DynamicSememeColumnInfo[] columns = TtkUtils.configureConceptAsAssociation(concept,
                            item.getSourcePropertyDefinition(), item.getAssociationInverseName(),
                            item.getAssociationComponentTypeRestriction(),
                            item.getAssociationComponentTypeSubRestriction(),
                            (rev -> utility.setRevisionAttributes(rev, Status.ACTIVE,
                                    concept.getConceptAttributes().getTime())));
                    utility.registerDynamicSememeColumnInfo(Property.this.getUUID(), columns);
                }
            } catch (NoSuchAlgorithmException | UnsupportedEncodingException | PropertyVetoException e) {
                throw new RuntimeException("Unexpected");
            }

            for (ConceptCreationNotificationListener ccn : listeners_) {
                ccn.conceptCreated(Property.this, concept);
            }
        }
    };
}
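
getCallback() returns a BiConsumer rather than acting immediately, so the writing code can invoke the callback later with both the concept and the utility. A minimal sketch of that shape with invented stand-in types (Record and Writer are hypothetical, not part of the project above):

import java.util.function.BiConsumer;

public class CallbackSketch {
    static class Record { String name = "example"; }
    static class Writer { void write(Record r) { System.out.println("wrote " + r.name); } }

    // The producer hands back a callback instead of acting immediately.
    static BiConsumer<Record, Writer> getCallback() {
        return (record, writer) -> System.out.println("about to write " + record.name);
    }

    public static void main(String[] args) {
        Record record = new Record();
        Writer writer = new Writer();
        getCallback().accept(record, writer); // invoked just before writing
        writer.write(record);
    }
}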

From source file: com.qwazr.search.annotations.AnnotatedIndexService.java

/**
 * Build a new Map by reading the IndexField annotations
 *
 * @param row the record
 * @return a new Map
 */
private Map<String, Object> newMap(final T row) {
    final Map<String, Object> map = new HashMap<>();
    fieldMap.forEach(new BiConsumer<String, Field>() {
        @Override
        public void accept(String name, Field field) {
            try {
                Object value = field.get(row);
                if (value == null)
                    return;
                map.put(name, value);
            } catch (IllegalAccessException e) {
                throw new RuntimeException(e);
            }
        }
    });
    return map.isEmpty() ? null : map;
}

From source file: com.qwazr.search.annotations.AnnotatedIndexService.java

private T toRecord(Map<String, Object> fields) throws ReflectiveOperationException {
    if (fields == null)
        return null;
    final T record = indexDefinitionClass.newInstance();
    fields.forEach(new BiConsumer<String, Object>() {
        @Override
        public void accept(String fieldName, Object fieldValue) {
            Field field = fieldMap.get(fieldName);
            if (field == null)
                return;
            try {
                field.set(record, fieldValue);
            } catch (IllegalAccessException e) {
                throw new RuntimeException(e);
            }
        }
    });
    return record;
}
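
newMap and toRecord above are mirror images: one walks the field map and reads values by reflection, the other walks a value map and writes them back. A minimal stand-alone sketch of the same round trip, assuming a hypothetical Doc class with public fields (the real code drives this from the project's IndexField annotations):

import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;

public class ReflectionMapSketch {
    public static class Doc {
        public String title;
        public Integer year;
    }

    public static void main(String[] args) {
        // Field map keyed by field name, as fieldMap is in the example above.
        Map<String, Field> fieldMap = new HashMap<>();
        for (Field f : Doc.class.getFields()) {
            fieldMap.put(f.getName(), f);
        }

        Doc original = new Doc();
        original.title = "BiConsumer";
        original.year = 2015;

        // Object -> Map (cf. newMap): read each field inside the BiConsumer.
        Map<String, Object> asMap = new HashMap<>();
        fieldMap.forEach((name, field) -> {
            try {
                Object value = field.get(original);
                if (value != null) {
                    asMap.put(name, value);
                }
            } catch (IllegalAccessException e) {
                throw new RuntimeException(e);
            }
        });

        // Map -> Object (cf. toRecord): write each value back by field name.
        Doc copy = new Doc();
        asMap.forEach((name, value) -> {
            try {
                Field field = fieldMap.get(name);
                if (field != null) {
                    field.set(copy, value);
                }
            } catch (IllegalAccessException e) {
                throw new RuntimeException(e);
            }
        });

        System.out.println(copy.title + " " + copy.year); // BiConsumer 2015
    }
}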

From source file: com.diversityarrays.kdxplore.field.OriginDirectionTraversalChoicePanel.java

public void setOnlyAllow(OrOrTr... oots) {
    List<OrOrTr> list = Arrays.asList(oots);

    Set<Orientation> orientationSet = list.stream().map(OrOrTr::getOrientation).collect(Collectors.toSet());

    Set<Origin> originSet = list.stream().map(OrOrTr::getOrigin).collect(Collectors.toSet());

    Set<Traversal> traversalSet = list.stream().map(OrOrTr::getTraversal).collect(Collectors.toSet());

    BiConsumer<RbPair, OT_RadioButton> consumer = new BiConsumer<RbPair, OT_RadioButton>() {
        @Override
        public void accept(RbPair rbPair, OT_RadioButton btn) {
            boolean enable = orientationSet.contains(btn.orientation) && traversalSet.contains(btn.traversal)
                    && originSet.contains(rbPair.cardName.origin);
            btn.setEnabled(enable);
        }
    };
    for (RbPair rbp : rbPairs) {
        consumer.accept(rbp, rbp.rb1);
        consumer.accept(rbp, rbp.rb2);
    }

    for (CornerLabel label : cornerLabels) {
        label.setEnabled(originSet.contains(label.origin));
    }

    for (CornerDirectionRadioButton cdrb : cornerDirectionButtons) {
        cdrb.setEnabled(cdrb.rbPair.isEitherButtonEnabled());
    }

    for (OT_RadioButton rb : orientationTraversalButtons) {
        rb.setEnabled(orientationSet.contains(rb.orientation) && traversalSet.contains(rb.traversal));
    }
}
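
In setOnlyAllow the BiConsumer is never handed to a forEach; it is held in a local variable and its accept method is called directly, so the enable/disable decision is written once and applied to both radio buttons of each pair. A minimal stand-alone sketch of that direct-invocation style, using an invented Pair type:

import java.util.Arrays;
import java.util.List;
import java.util.function.BiConsumer;

public class DirectAcceptSketch {
    static class Pair {
        final String left, right;
        Pair(String left, String right) { this.left = left; this.right = right; }
    }

    public static void main(String[] args) {
        List<Pair> pairs = Arrays.asList(new Pair("a1", "a2"), new Pair("b1", "b2"));

        // Two-argument logic defined once...
        BiConsumer<Pair, String> report =
                (pair, side) -> System.out.println(side + " belongs to " + pair.left + "/" + pair.right);

        // ...and applied by calling accept directly on each side of each pair.
        for (Pair pair : pairs) {
            report.accept(pair, pair.left);
            report.accept(pair, pair.right);
        }
    }
}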

From source file: com.qwazr.crawler.web.manager.WebCrawlThread.java

private void crawlUrlMap(Set<URI> crawledURIs, Map<String, Integer> urlMap) {

    urlMap.forEach(new BiConsumer<String, Integer>() {
        @Override
        public void accept(String uri, Integer depth) {
            try {
                crawlOne(crawledURIs, new URI(uri), null, depth);
            } catch (Exception e) {
                logger.warn("Malformed URI: " + uri);
            }
        }
    });
}

From source file: com.ikanow.aleph2.shared.crud.elasticsearch.services.ElasticsearchCrudService.java

@Override
public CompletableFuture<Supplier<Object>> storeObject(final O new_object, final boolean replace_if_present) {
    try {
        final ReadWriteContext rw_context = getRwContextOrThrow(_state.es_context, "storeObject");

        final IndexRequestBuilder irb = singleObjectIndexRequest(Either.left(rw_context),
                Either.left(new_object), replace_if_present, false);

        // Execute and handle result

        final Function<IndexResponse, Supplier<Object>> success_handler = ir -> {
            return () -> ir.getId();
        };

        // Recursive, so has some hoops to jump through (lambda can't access itself)
        final BiConsumer<Throwable, CompletableFuture<Supplier<Object>>> error_handler = new BiConsumer<Throwable, CompletableFuture<Supplier<Object>>>() {
            @Override
            public void accept(final Throwable error, final CompletableFuture<Supplier<Object>> future) {
                Patterns.match(error).andAct()
                        .when(org.elasticsearch.index.mapper.MapperParsingException.class, mpe -> {
                            final Set<String> fixed_type_fields = rw_context.typeContext().fixed_type_fields();
                            if (!fixed_type_fields.isEmpty()) {
                                // Obtain the field name from the exception (if we fail then drop the record) 
                                final String field = getFieldFromParsingException(mpe.getMessage());
                                if ((null == field) || fixed_type_fields.contains(field)) {
                                    future.completeExceptionally(error);
                                    return;
                                }
                            } //(else roll on to...)                     
                            Patterns.match(rw_context.typeContext()).andAct().when(
                                    ElasticsearchContext.TypeContext.ReadWriteTypeContext.AutoRwTypeContext.class,
                                    auto_context -> {
                                        irb.setType(ElasticsearchContextUtils.getNextAutoType(
                                                auto_context.getPrefix(), irb.request().type()));
                                        ElasticsearchFutureUtils.wrap(irb.execute(), future,
                                                (ir, next_future) -> {
                                                    next_future.complete(success_handler.apply(ir));
                                                }, this);
                                    }).otherwise(() -> future.completeExceptionally(error));
                        }).otherwise(() -> future.completeExceptionally(error));
            }
        };

        return ElasticsearchFutureUtils.wrap(irb.execute(), success_handler, error_handler);
    } catch (Exception e) {
        return FutureUtils.returnError(e);
    }
}
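
storeObject above uses a BiConsumer as an asynchronous error handler, passing it the error together with the future to complete. The JDK uses the same shape: CompletableFuture.whenComplete takes a BiConsumer of the result and the Throwable. A minimal stand-alone sketch of that standard-library variant (it does not reproduce ElasticsearchFutureUtils.wrap):

import java.util.concurrent.CompletableFuture;
import java.util.function.BiConsumer;

public class WhenCompleteSketch {
    public static void main(String[] args) {
        // BiConsumer over (result, error): error is non-null only when the stage failed.
        BiConsumer<String, Throwable> handler = (result, error) -> {
            if (error != null) {
                System.err.println("failed: " + error.getMessage());
            } else {
                System.out.println("stored id " + result);
            }
        };

        CompletableFuture.supplyAsync(() -> "doc-42")
                .whenComplete(handler)
                .join();
    }
}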

From source file: com.ikanow.aleph2.shared.crud.elasticsearch.services.ElasticsearchCrudService.java

@Override
public CompletableFuture<Tuple2<Supplier<List<Object>>, Supplier<Long>>> storeObjects(final List<O> new_objects,
        final boolean replace_if_present) {
    try {
        final ReadWriteContext rw_context = getRwContextOrThrow(_state.es_context, "storeObjects");

        final BulkRequestBuilder brb = new_objects.stream()
                .reduce(_state.client.prepareBulk().setConsistencyLevel(WriteConsistencyLevel.ONE)
                        .setRefresh(CreationPolicy.AVAILABLE_IMMEDIATELY == _state.creation_policy),
                        (acc, val) -> acc.add(singleObjectIndexRequest(Either.left(rw_context),
                                Either.left(val), replace_if_present, true)),
                        (acc1, acc2) -> {
                            throw new RuntimeException("Internal logic error - Parallel not supported");
                        });

        final BiConsumer<BulkResponse, CompletableFuture<Tuple2<Supplier<List<Object>>, Supplier<Long>>>> action_handler = new BiConsumer<BulkResponse, CompletableFuture<Tuple2<Supplier<List<Object>>, Supplier<Long>>>>() {
            // WARNING: mutable/imperative code ahead...
            long _curr_written = 0;
            List<Object> _id_list = null;
            HashMap<String, String> _mapping_failures = null;

            @Override
            public void accept(final BulkResponse result,
                    final CompletableFuture<Tuple2<Supplier<List<Object>>, Supplier<Long>>> future) {

                if (result.hasFailures() && (rw_context
                        .typeContext() instanceof ElasticsearchContext.TypeContext.ReadWriteTypeContext.AutoRwTypeContext)) {
                    final ElasticsearchContext.TypeContext.ReadWriteTypeContext.AutoRwTypeContext auto_context = (ElasticsearchContext.TypeContext.ReadWriteTypeContext.AutoRwTypeContext) rw_context
                            .typeContext();
                    // Recursive builder in case I need to build a second batch of docs                        
                    BulkRequestBuilder brb2 = null;

                    if (null == _id_list) {
                        _id_list = new LinkedList<Object>();
                    }
                    HashMap<String, String> temp_mapping_failures = null;
                    final Iterator<BulkItemResponse> it = result.iterator();
                    while (it.hasNext()) {
                        final BulkItemResponse bir = it.next();
                        if (bir.isFailed()) {
                            if (bir.getFailure().getMessage().startsWith("MapperParsingException")) {
                                final Set<String> fixed_type_fields = rw_context.typeContext()
                                        .fixed_type_fields();
                                if (!fixed_type_fields.isEmpty()) {
                                    // Obtain the field name from the exception (if we fail then drop the record) 
                                    final String field = getFieldFromParsingException(
                                            bir.getFailure().getMessage());
                                    if ((null == field) || fixed_type_fields.contains(field)) {
                                        continue;
                                    }
                                } //(else roll on to...)                                                

                                // OK this is the case where I might be able to apply auto types:
                                if (null == brb2) {
                                    brb2 = _state.client.prepareBulk()
                                            .setConsistencyLevel(WriteConsistencyLevel.ONE).setRefresh(
                                                    CreationPolicy.AVAILABLE_IMMEDIATELY == _state.creation_policy);
                                }
                                String failed_json = null;
                                if (null == _mapping_failures) { // first time through, use item id to grab the objects from the original request
                                    if (null == temp_mapping_failures) {
                                        temp_mapping_failures = new HashMap<String, String>();
                                    }
                                    final ActionRequest<?> ar = brb.request().requests().get(bir.getItemId());
                                    if (ar instanceof IndexRequest) {
                                        IndexRequest ir = (IndexRequest) ar;
                                        failed_json = ir.source().toUtf8();
                                        temp_mapping_failures.put(bir.getId(), failed_json);
                                    }
                                } else { // have already grabbed all the failure _ids and stuck in a map
                                    failed_json = _mapping_failures.get(bir.getId());
                                }
                                if (null != failed_json) {
                                    brb2.add(singleObjectIndexRequest(
                                            Either.right(Tuples._2T(bir.getIndex(),
                                                    ElasticsearchContextUtils.getNextAutoType(
                                                            auto_context.getPrefix(), bir.getType()))),
                                            Either.right(Tuples._2T(bir.getId(), failed_json)), false, true));
                                }
                            }
                            // Ugh otherwise just silently fail I guess? 
                            //(should I also look for transient errors and resubmit them after a pause?!)
                        } else { // (this item worked)
                            _id_list.add(bir.getId());
                            _curr_written++;
                        }
                    }
                    if (null != brb2) { // found mapping errors to retry with
                        if (null == _mapping_failures) // (first level of recursion)
                            _mapping_failures = temp_mapping_failures;

                        // (note that if brb2.request().requests().isEmpty() this is an internal logic error, so it's OK to throw)
                        ElasticsearchFutureUtils.wrap(brb2.execute(), future, this, (error, future2) -> {
                            future2.completeExceptionally(error);
                        });
                    } else { // relative success, plus we've built the list anyway
                        future.complete(Tuples._2T(() -> _id_list, () -> (Long) _curr_written));
                    }
                } else { // No errors with this iteration of the bulk request         
                    _curr_written += result.getItems().length;

                    if (null == _id_list) { // This is the first bulk request, no recursion on failures, so can lazily create the list in case it isn't needed
                        final Supplier<List<Object>> get_objects = () -> {
                            return StreamSupport.stream(result.spliterator(), false)
                                    .filter(bir -> !bir.isFailed()).map(bir -> bir.getId())
                                    .collect(Collectors.toList());
                        };
                        final Supplier<Long> get_count_workaround = () -> {
                            return StreamSupport.stream(result.spliterator(), false)
                                    .filter(bir -> !bir.isFailed()).collect(Collectors.counting());
                        };
                        get_count_workaround.get();
                        future.complete(Tuples._2T(get_objects, get_count_workaround));
                    } else { // have already calculated everything so just return it                     
                        future.complete(Tuples._2T(() -> _id_list, () -> (Long) _curr_written));
                    }
                }
            }
        };

        return ElasticsearchFutureUtils.wrap(brb.execute(),
                new CompletableFuture<Tuple2<Supplier<List<Object>>, Supplier<Long>>>(), action_handler,
                (error, future) -> {
                    future.completeExceptionally(error);
                });
    } catch (Exception e) {
        return FutureUtils.returnError(e);
    }
}

From source file: com.diversityarrays.kdxplore.KDXploreFrame.java

private Map<KdxApp, Component> collectKdxApps(String[] classNames) throws IOException {

    Map<KdxApp, Component> result = new HashMap<>();

    BiConsumer<String, Either<Throwable, KdxAppService>> onServiceFound = new BiConsumer<String, Either<Throwable, KdxAppService>>() {
        @Override
        public void accept(String className, Either<Throwable, KdxAppService> either) {
            Throwable error = null;
            if (either.isRight()) {
                KdxAppService kdxAppService = either.right();
                if (kdxAppService != null) {
                    try {
                        KdxApp kdxApp = kdxAppService.createKdxApp(pluginInfo);
                        Component uiComponent = kdxApp.getUIComponent();
                        result.put(kdxApp, uiComponent);
                    } catch (Exception | NoClassDefFoundError e) {
                        error = e;
                    }
                }
            } else {
                error = either.left();
            }

            if (error != null) {
                String msg = Msg.MSG_PROBLEM_GETTING_KDXAPP(className);
                Shared.Log.w(TAG, msg, error);
                messagesPanel.println(msg);
                messagesPanel.println(error);
            }
        }
    };

    Shared.detectServices(KdxAppService.class, onServiceFound, classNames);

    return result;
}