Example usage for java.util.concurrent ConcurrentLinkedDeque add

List of usage examples for java.util.concurrent ConcurrentLinkedDeque add

Introduction

On this page you can find example usage for java.util.concurrent ConcurrentLinkedDeque add.

Prototype

public boolean add(E e) 

Document

Inserts the specified element at the tail of this deque.
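
A minimal, self-contained sketch of the call itself (the class and variable names here are illustrative, not taken from the examples below): add appends the element at the tail of the unbounded deque, never blocks, and always returns true.

import java.util.concurrent.ConcurrentLinkedDeque;

public class ConcurrentLinkedDequeAddExample {
    public static void main(String[] args) {
        ConcurrentLinkedDeque<String> deque = new ConcurrentLinkedDeque<>();
        // add(e) inserts at the tail; the deque is unbounded, so it always returns true
        deque.add("first");
        deque.add("second");
        // elements remain in insertion order from head to tail
        System.out.println(deque.peekFirst()); // prints "first"
        System.out.println(deque.peekLast());  // prints "second"
    }
}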

Usage

From source file:gobblin.couchbase.writer.CouchbaseWriterTest.java

private List<Pair<AbstractDocument, Future>> writeRecords(Iterator<AbstractDocument> recordIterator,
        CouchbaseWriter writer, int outstandingRequests, long kvTimeout, TimeUnit kvTimeoutUnit)
        throws DataConversionException, UnsupportedEncodingException {
    final BlockingQueue<Pair<AbstractDocument, Future>> outstandingCallQueue = new LinkedBlockingDeque<>(
            outstandingRequests);
    final List<Pair<AbstractDocument, Future>> failedFutures = new ArrayList<>(outstandingRequests);

    int index = 0;
    long runTime = 0;
    final AtomicInteger callbackSuccesses = new AtomicInteger(0);
    final AtomicInteger callbackFailures = new AtomicInteger(0);
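    // thread-safe deque collecting exceptions reported by the asynchronous write callbacks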
    final ConcurrentLinkedDeque<Throwable> callbackExceptions = new ConcurrentLinkedDeque<>();
    Verifier verifier = new Verifier();
    while (recordIterator.hasNext()) {
        AbstractDocument doc = recordIterator.next();
        index++;
        verifier.onWrite(doc);
        final long startTime = System.nanoTime();
        Future callFuture = writer.write(doc, new WriteCallback<TupleDocument>() {
            @Override
            public void onSuccess(WriteResponse<TupleDocument> writeResponse) {
                callbackSuccesses.incrementAndGet();
            }

            @Override
            public void onFailure(Throwable throwable) {
                callbackFailures.incrementAndGet();
                callbackExceptions.add(throwable);
            }
        });
        drainQueue(outstandingCallQueue, 1, kvTimeout, kvTimeoutUnit, failedFutures);
        outstandingCallQueue.add(new Pair<>(doc, callFuture));
        runTime += System.nanoTime() - startTime;
    }
    int failedWrites = 0;
    long responseStartTime = System.nanoTime();
    drainQueue(outstandingCallQueue, outstandingRequests, kvTimeout, kvTimeoutUnit, failedFutures);
    runTime += System.nanoTime() - responseStartTime;

    for (Throwable failure : callbackExceptions) {
        System.out.println(failure.getClass() + " : " + failure.getMessage());
    }
    failedWrites += failedFutures.size();

    System.out.println("Total time to send " + index + " records = " + runTime / 1000000.0 + "ms, "
            + "Failed writes = " + failedWrites + " Callback Successes = " + callbackSuccesses.get()
            + "Callback Failures = " + callbackFailures.get());

    verifier.verify(writer.getBucket());
    return failedFutures;
}

From source file:gobblin.ingestion.google.webmaster.GoogleWebmasterDataFetcherImpl.java

private void submitJob(final Pair<String, FilterOperator> job, final ApiDimensionFilter countryFilter,
        final String startDate, final String endDate, final List<Dimension> dimensions, ExecutorService es,
        final ConcurrentLinkedDeque<String> allPages,
        final ConcurrentLinkedDeque<Pair<String, FilterOperator>> nextRound) {
    es.submit(new Runnable() {
        @Override
        public void run() {
            try {
                LIMITER.acquirePermits(1);
            } catch (InterruptedException e) {
                throw new RuntimeException("RateBasedLimiter got interrupted.", e);
            }

            String countryString = countryFilterToString(countryFilter);
            List<ApiDimensionFilter> filters = new LinkedList<>();
            filters.add(countryFilter);

            String prefix = job.getLeft();
            FilterOperator operator = job.getRight();
            String jobString = String.format("job(prefix: %s, operator: %s)", prefix, operator);
            filters.add(GoogleWebmasterFilter.pageFilter(operator, prefix));
            List<String> pages;
            try {
                pages = _client.getPages(_siteProperty, startDate, endDate, countryString,
                        GoogleWebmasterClient.API_ROW_LIMIT, dimensions, filters, 0);
                log.debug(String.format("%d pages fetched for %s market-%s from %s to %s.", pages.size(),
                        jobString, countryString, startDate, endDate));
            } catch (IOException e) {
                log.debug(String.format("%s failed due to %s. Retrying...", jobString, e.getMessage()));
                nextRound.add(job);
                return;
            }

            //If the number of pages hits the API row limit, this must be a "CONTAINS" job.
            //We need to create sub-tasks, and re-check the current prefix with "EQUALS".
            if (pages.size() == GoogleWebmasterClient.API_ROW_LIMIT) {
                log.info(String.format("Expanding the prefix '%s'", prefix));
                expandJobs(nextRound, prefix);
                nextRound.add(Pair.of(prefix, FilterOperator.EQUALS));
            } else {
                //Otherwise, we're done with the current job.
                allPages.addAll(pages);
            }
        }
    });
}

From source file:org.apache.eagle.alert.engine.publisher.dedup.DedupCache.java

private DedupValue[] add(EventUniq eventEniq, AlertStreamEvent event, String stateFieldValue,
        String stateCloseValue) {
    DedupValue dedupValue = null;
    if (!events.containsKey(eventEniq)) {
        dedupValue = createDedupValue(eventEniq, event, stateFieldValue);
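        // first occurrence of this key: start a new deque and append the value at its tail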
        ConcurrentLinkedDeque<DedupValue> dedupValues = new ConcurrentLinkedDeque<>();
        dedupValues.add(dedupValue);
        // skip the event if the put fails due to concurrency
        events.put(eventEniq, dedupValues);
        LOG.info("{} Add new dedup key {}, and value {}", this.publishName, eventEniq, dedupValues);
    } else if (!StringUtils.equalsIgnoreCase(stateFieldValue,
            events.get(eventEniq).getLast().getStateFieldValue())) {
        // a de-dup value already exists; try to update or reset it
        DedupValue lastDedupValue = events.get(eventEniq).getLast();
        dedupValue = updateDedupValue(lastDedupValue, eventEniq, event, stateFieldValue, stateCloseValue);
        LOG.info("{} Update dedup key {}, and value {}", this.publishName, eventEniq, dedupValue);
    }
    if (dedupValue == null) {
        return null;
    }
    return new DedupValue[] { dedupValue };
}

From source file:org.apache.gobblin.ingestion.google.webmaster.GoogleWebmasterDataFetcherImpl.java

private void submitJob(final Pair<String, FilterOperator> job, final ApiDimensionFilter countryFilter,
        final String startDate, final String endDate, final List<Dimension> dimensions, ExecutorService es,
        final ConcurrentLinkedDeque<String> allPages,
        final ConcurrentLinkedDeque<Pair<String, FilterOperator>> nextRound, final int rowLimit) {
    es.submit(new Runnable() {
        @Override
        public void run() {
            try {
                LIMITER.acquirePermits(1);
            } catch (InterruptedException e) {
                throw new RuntimeException("RateBasedLimiter got interrupted.", e);
            }

            String countryString = countryFilterToString(countryFilter);
            List<ApiDimensionFilter> filters = new LinkedList<>();
            filters.add(countryFilter);

            String prefix = job.getLeft();
            FilterOperator operator = job.getRight();
            String jobString = String.format("job(prefix: %s, operator: %s)", prefix, operator);
            filters.add(GoogleWebmasterFilter.pageFilter(operator, prefix));
            List<String> pages;
            try {
                pages = _client.getPages(_siteProperty, startDate, endDate, countryString, rowLimit, dimensions,
                        filters, 0);
                log.debug(String.format("%d pages fetched for %s market-%s from %s to %s.", pages.size(),
                        jobString, countryString, startDate, endDate));
            } catch (IOException e) {
                log.debug(String.format("%s failed due to %s. Retrying...", jobString, e.getMessage()));
                nextRound.add(job);
                return;
            }

            //If the number of pages hits the API row limit, this must be a "CONTAINS" job.
            //We need to create sub-tasks, and re-check the current prefix with "EQUALS".
            if (pages.size() == GoogleWebmasterClient.API_ROW_LIMIT) {
                log.info(String.format("Expanding the prefix '%s'", prefix));
                nextRound.add(Pair.of(prefix, FilterOperator.EQUALS));
                for (String expanded : getUrlPartitions(prefix)) {
                    nextRound.add(Pair.of(expanded, FilterOperator.CONTAINS));
                }
            } else {
                //Otherwise, we're done with the current job.
                allPages.addAll(pages);
            }
        }
    });
}