Example usage for java.util.concurrent ConcurrentLinkedDeque addAll

List of usage examples for java.util.concurrent ConcurrentLinkedDeque addAll

Introduction

On this page you can find an example of usage for java.util.concurrent ConcurrentLinkedDeque addAll.

Prototype

public boolean addAll(Collection<? extends E> c) 

Source Link

Document

Appends all of the elements in the specified collection to the end of this deque, in the order that they are returned by the specified collection's iterator.

Usage

From source file:gobblin.ingestion.google.webmaster.GoogleWebmasterDataFetcherImpl.java

/**
 * Submits an asynchronous page-fetching job to the executor.
 * <p>
 * The task rate-limits itself via {@code LIMITER}, then queries the Webmaster API for all
 * pages matching {@code job}'s (prefix, operator) filter within the given country and date
 * range. On an {@link IOException} the job is re-queued into {@code nextRound} for retry.
 * If the response is truncated at {@code API_ROW_LIMIT}, the prefix is expanded into
 * sub-jobs (plus an EQUALS check for the prefix itself); otherwise the fetched pages are
 * appended to {@code allPages}.
 *
 * @param job           (pagePrefix, filterOperator) pair describing this unit of work
 * @param countryFilter country dimension filter applied to every request
 * @param startDate     inclusive start date of the query window
 * @param endDate       inclusive end date of the query window
 * @param dimensions    requested report dimensions
 * @param es            executor the task is submitted to
 * @param allPages      thread-safe sink for successfully fetched pages
 * @param nextRound     thread-safe queue of jobs to run in the next round (retries/expansions)
 */
private void submitJob(final Pair<String, FilterOperator> job, final ApiDimensionFilter countryFilter,
        final String startDate, final String endDate, final List<Dimension> dimensions, ExecutorService es,
        final ConcurrentLinkedDeque<String> allPages,
        final ConcurrentLinkedDeque<Pair<String, FilterOperator>> nextRound) {
    es.submit(new Runnable() {
        @Override
        public void run() {
            try {
                LIMITER.acquirePermits(1);
            } catch (InterruptedException e) {
                // Restore the interrupt status before propagating so upstream code
                // (e.g. the executor) can still observe the interruption.
                Thread.currentThread().interrupt();
                throw new RuntimeException("RateBasedLimiter got interrupted.", e);
            }

            String countryString = countryFilterToString(countryFilter);
            List<ApiDimensionFilter> filters = new LinkedList<>();
            filters.add(countryFilter);

            String prefix = job.getLeft();
            FilterOperator operator = job.getRight();
            String jobString = String.format("job(prefix: %s, operator: %s)", prefix, operator);
            filters.add(GoogleWebmasterFilter.pageFilter(operator, prefix));
            List<String> pages;
            try {
                pages = _client.getPages(_siteProperty, startDate, endDate, countryString,
                        GoogleWebmasterClient.API_ROW_LIMIT, dimensions, filters, 0);
                log.debug(String.format("%d pages fetched for %s market-%s from %s to %s.", pages.size(),
                        jobString, countryString, startDate, endDate));
            } catch (IOException e) {
                // Transient API failure: re-queue this job for the next round instead of failing hard.
                log.debug(String.format("%s failed due to %s. Retrying...", jobString, e.getMessage()));
                nextRound.add(job);
                return;
            }

            //If the number of pages is at the LIMIT, it must be a "CONTAINS" job.
            //We need to create sub-tasks, and check current page with "EQUALS"
            if (pages.size() == GoogleWebmasterClient.API_ROW_LIMIT) {
                log.info(String.format("Expanding the prefix '%s'", prefix));
                expandJobs(nextRound, prefix);
                nextRound.add(Pair.of(prefix, FilterOperator.EQUALS));
            } else {
                //Otherwise, we are done with the current job.
                allPages.addAll(pages);
            }
        }
    });
}

From source file:org.apache.gobblin.ingestion.google.webmaster.GoogleWebmasterDataFetcherImpl.java

/**
 * Submits an asynchronous page-fetching job to the executor.
 * <p>
 * The task rate-limits itself via {@code LIMITER}, then queries the Webmaster API for all
 * pages matching {@code job}'s (prefix, operator) filter within the given country and date
 * range, requesting at most {@code rowLimit} rows. On an {@link IOException} the job is
 * re-queued into {@code nextRound} for retry. If the response appears truncated, the prefix
 * is partitioned into CONTAINS sub-jobs (plus an EQUALS check for the prefix itself);
 * otherwise the fetched pages are appended to {@code allPages}.
 *
 * @param job           (pagePrefix, filterOperator) pair describing this unit of work
 * @param countryFilter country dimension filter applied to every request
 * @param startDate     inclusive start date of the query window
 * @param endDate       inclusive end date of the query window
 * @param dimensions    requested report dimensions
 * @param es            executor the task is submitted to
 * @param allPages      thread-safe sink for successfully fetched pages
 * @param nextRound     thread-safe queue of jobs to run in the next round (retries/expansions)
 * @param rowLimit      maximum number of rows requested from the API per call
 */
private void submitJob(final Pair<String, FilterOperator> job, final ApiDimensionFilter countryFilter,
        final String startDate, final String endDate, final List<Dimension> dimensions, ExecutorService es,
        final ConcurrentLinkedDeque<String> allPages,
        final ConcurrentLinkedDeque<Pair<String, FilterOperator>> nextRound, final int rowLimit) {
    es.submit(new Runnable() {
        @Override
        public void run() {
            try {
                LIMITER.acquirePermits(1);
            } catch (InterruptedException e) {
                // Restore the interrupt status before propagating so upstream code
                // (e.g. the executor) can still observe the interruption.
                Thread.currentThread().interrupt();
                throw new RuntimeException("RateBasedLimiter got interrupted.", e);
            }

            String countryString = countryFilterToString(countryFilter);
            List<ApiDimensionFilter> filters = new LinkedList<>();
            filters.add(countryFilter);

            String prefix = job.getLeft();
            FilterOperator operator = job.getRight();
            String jobString = String.format("job(prefix: %s, operator: %s)", prefix, operator);
            filters.add(GoogleWebmasterFilter.pageFilter(operator, prefix));
            List<String> pages;
            try {
                pages = _client.getPages(_siteProperty, startDate, endDate, countryString, rowLimit, dimensions,
                        filters, 0);
                log.debug(String.format("%d pages fetched for %s market-%s from %s to %s.", pages.size(),
                        jobString, countryString, startDate, endDate));
            } catch (IOException e) {
                // Transient API failure: re-queue this job for the next round instead of failing hard.
                log.debug(String.format("%s failed due to %s. Retrying...", jobString, e.getMessage()));
                nextRound.add(job);
                return;
            }

            //If the number of pages is at the LIMIT, it must be a "CONTAINS" job.
            //We need to create sub-tasks, and check current page with "EQUALS"
            // NOTE(review): the request above uses rowLimit, but truncation is detected against the
            // API_ROW_LIMIT constant. If rowLimit < API_ROW_LIMIT this branch can never fire and
            // truncated results would be silently accepted — confirm rowLimit is always passed as
            // API_ROW_LIMIT, or compare against Math.min(rowLimit, API_ROW_LIMIT) instead.
            if (pages.size() == GoogleWebmasterClient.API_ROW_LIMIT) {
                log.info(String.format("Expanding the prefix '%s'", prefix));
                nextRound.add(Pair.of(prefix, FilterOperator.EQUALS));
                for (String expanded : getUrlPartitions(prefix)) {
                    nextRound.add(Pair.of(expanded, FilterOperator.CONTAINS));
                }
            } else {
                //Otherwise, we are done with the current job.
                allPages.addAll(pages);
            }
        }
    });
}