Example usage for the java.util.concurrent.atomic.AtomicInteger constructor AtomicInteger(int)

Introduction

On this page you can find example usage of the java.util.concurrent.atomic.AtomicInteger constructor AtomicInteger(int initialValue).

Prototype

public AtomicInteger(int initialValue) 

Document

Creates a new AtomicInteger with the given initial value.
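
Before the project examples below, here is a minimal, self-contained sketch of the constructor on its own (class and variable names are illustrative only):

import java.util.concurrent.atomic.AtomicInteger;

public class AtomicIntegerConstructorExample {
    public static void main(String[] args) {
        // Seed the counter with an initial value of 5.
        AtomicInteger counter = new AtomicInteger(5);

        // Atomic read-modify-write operations on the shared value.
        int previous = counter.getAndIncrement(); // returns 5, counter becomes 6
        int current = counter.addAndGet(10);      // counter becomes 16

        System.out.println(previous + " -> " + current); // prints "5 -> 16"
    }
}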

Usage

From source file:jetbrains.exodus.env.EnvironmentImpl.java

@SuppressWarnings({ "ThisEscapedInObjectConstruction" })
EnvironmentImpl(@NotNull final Log log, @NotNull final EnvironmentConfig ec) {
    this.log = log;
    this.ec = ec;
    applyEnvironmentSettings(log.getLocation(), ec);
    final Pair<MetaTree, Integer> meta = MetaTree.create(this);
    metaTree = meta.getFirst();
    structureId = new AtomicInteger(meta.getSecond());
    txns = new TransactionSet();
    txnSafeTasks = new LinkedList<>();
    invalidateStoreGetCache();
    invalidateTreeNodesCache();
    envSettingsListener = new EnvironmentSettingsListener();
    ec.addChangedSettingsListener(envSettingsListener);

    gc = new GarbageCollector(this);

    throwableOnCommit = null;
    throwableOnClose = null;

    if (transactionTimeout() > 0) {
        new StuckTransactionMonitor(this);
    }

    configMBean = ec.isManagementEnabled() ? new jetbrains.exodus.env.management.EnvironmentConfig(this) : null;

    if (logging.isInfoEnabled()) {
        logging.info("Exodus environment created: " + log.getLocation());
    }
}

From source file:com.zhaimi.message.kafka.KafkaReceiver.java

private void processStreamsByTopic(String topicKeys, List<KafkaStream<byte[], byte[]>> streamList) {
    // init stream thread pool
    ExecutorService streamPool = Executors.newFixedThreadPool(partitions);
    String[] topics = StringUtils.split(topicKeys, ",");
    if (log.isDebugEnabled())
        log.debug("Processing KafkaStreamList, topic count={}, topics={}, partitions/topic={}",
                topics.length, topicKeys, partitions);

    // process each stream on its own thread
    AtomicInteger index = new AtomicInteger(0);
    for (KafkaStream<byte[], byte[]> stream : streamList) {
        Thread streamThread = new Thread() {

            @Override
            public void run() {
                int i = index.getAndAdd(1);
                if (log.isDebugEnabled())
                    log.debug("KafkaStream -- No.={}, partitions={}", i, partitions + ":" + i);

                ConsumerIterator<byte[], byte[]> consumerIterator = stream.iterator();

                processStreamByConsumer(topicKeys, consumerIterator);
            }
        };
        streamPool.execute(streamThread);
    }
}
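
In the snippet above, AtomicInteger(0) combined with getAndAdd(1) hands each stream thread a unique index. A stripped-down sketch of that pattern, using a hypothetical worker pool and no Kafka dependencies:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;

public class IndexedWorkers {
    public static void main(String[] args) {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        AtomicInteger index = new AtomicInteger(0);

        for (int i = 0; i < 4; i++) {
            pool.execute(() -> {
                // getAndAdd(1) returns 0, 1, 2, ... without races between workers.
                int workerNo = index.getAndAdd(1);
                System.out.println("worker " + workerNo + " started");
            });
        }
        pool.shutdown();
    }
}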

From source file:com.github.tomakehurst.wiremock.matching.EqualToXmlPattern.java

@Override
public MatchResult match(final String value) {
    return new MatchResult() {
        @Override
        public boolean isExactMatch() {
            if (isNullOrEmpty(value)) {
                return false;
            }

            try {
                Diff diff = DiffBuilder.compare(Input.from(expectedValue)).withTest(value)
                        .withComparisonController(ComparisonControllers.StopWhenDifferent).ignoreWhitespace()
                        .ignoreComments().withDifferenceEvaluator(IGNORE_UNCOUNTED_COMPARISONS).build();

                return !diff.hasDifferences();
            } catch (XMLUnitException e) {
                notifier().info("Failed to process XML. " + e.getMessage() + "\nExpected:\n" + expectedValue
                        + "\n\nActual:\n" + value);
                return false;
            }
        }

        @Override
        public double getDistance() {
            if (isNullOrEmpty(value)) {
                return 1.0;
            }

            final AtomicInteger totalComparisons = new AtomicInteger(0);
            final AtomicInteger differences = new AtomicInteger(0);

            Diff diff = null;
            try {
                diff = DiffBuilder.compare(Input.from(expectedValue)).withTest(value).ignoreWhitespace()
                        .ignoreComments().withDifferenceEvaluator(IGNORE_UNCOUNTED_COMPARISONS)
                        .withComparisonListeners(new ComparisonListener() {
                            @Override
                            public void comparisonPerformed(Comparison comparison, ComparisonResult outcome) {
                                if (COUNTED_COMPARISONS.contains(comparison.getType())
                                        && comparison.getControlDetails().getValue() != null) {
                                    totalComparisons.incrementAndGet();
                                    if (outcome == ComparisonResult.DIFFERENT) {
                                        differences.incrementAndGet();
                                    }
                                }
                            }
                        }).build();
            } catch (XMLUnitException e) {
                notifier().info("Failed to process XML. " + e.getMessage() + "\nExpected:\n" + expectedValue
                        + "\n\nActual:\n" + value);
                return 1.0;
            }

            notifier().info(Joiner.on("\n").join(diff.getDifferences()));

            return differences.doubleValue() / totalComparisons.doubleValue();
        }
    };
}

From source file:org.apache.hadoop.gateway.ha.dispatch.DefaultHaDispatch.java

private void failoverRequest(HttpUriRequest outboundRequest, HttpServletRequest inboundRequest,
        HttpServletResponse outboundResponse, HttpResponse inboundResponse, Exception exception)
        throws IOException {
    LOG.failingOverRequest(outboundRequest.getURI().toString());
    AtomicInteger counter = (AtomicInteger) inboundRequest.getAttribute(FAILOVER_COUNTER_ATTRIBUTE);
    if (counter == null) {
        counter = new AtomicInteger(0);
    }
    inboundRequest.setAttribute(FAILOVER_COUNTER_ATTRIBUTE, counter);
    if (counter.incrementAndGet() <= maxFailoverAttempts) {
        haProvider.markFailedURL(getServiceRole(), outboundRequest.getURI().toString());
        //null out target url so that rewriters run again
        inboundRequest.setAttribute(AbstractGatewayFilter.TARGET_REQUEST_URL_ATTRIBUTE_NAME, null);
        URI uri = getDispatchUrl(inboundRequest);
        ((HttpRequestBase) outboundRequest).setURI(uri);
        if (failoverSleep > 0) {
            try {
                Thread.sleep(failoverSleep);
            } catch (InterruptedException e) {
                LOG.failoverSleepFailed(getServiceRole(), e);
            }
        }
        executeRequest(outboundRequest, inboundRequest, outboundResponse);
    } else {
        LOG.maxFailoverAttemptsReached(maxFailoverAttempts, getServiceRole());
        if (inboundResponse != null) {
            writeOutboundResponse(outboundRequest, inboundRequest, outboundResponse, inboundResponse);
        } else {
            throw new IOException(exception);
        }
    }
}
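
The dispatch above stores the failover count as a request attribute so that it survives repeated calls for the same request. A simplified sketch of this get-or-create counting pattern, using a plain Map in place of the servlet request attributes and hypothetical key/limit values:

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

public class FailoverCounterSketch {
    private static final String COUNTER_KEY = "failover.counter"; // hypothetical attribute name
    private static final int MAX_ATTEMPTS = 3;                    // hypothetical limit

    static boolean shouldRetry(Map<String, Object> requestAttributes) {
        AtomicInteger counter = (AtomicInteger) requestAttributes.get(COUNTER_KEY);
        if (counter == null) {
            counter = new AtomicInteger(0);
            requestAttributes.put(COUNTER_KEY, counter);
        }
        // Retry only while the per-request attempt count stays within the limit.
        return counter.incrementAndGet() <= MAX_ATTEMPTS;
    }

    public static void main(String[] args) {
        Map<String, Object> attrs = new HashMap<>();
        for (int attempt = 1; attempt <= 5; attempt++) {
            System.out.println("attempt " + attempt + ": retry=" + shouldRetry(attrs));
        }
    }
}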

From source file:org.apache.hadoop.gateway.rm.dispatch.RMHaDispatchTest.java

@Test
public void testConnectivityFailure() throws Exception {
    String serviceName = "RESOURCEMANAGER";
    HaDescriptor descriptor = HaDescriptorFactory.createDescriptor();
    descriptor.addServiceConfig(
            HaDescriptorFactory.createServiceConfig(serviceName, "true", "1", "1000", "2", "1000", null, null));
    HaProvider provider = new DefaultHaProvider(descriptor);
    URI uri1 = new URI("http://unreachable-host");
    URI uri2 = new URI("http://reachable-host");
    ArrayList<String> urlList = new ArrayList<>();
    urlList.add(uri1.toString());
    urlList.add(uri2.toString());
    provider.addHaService(serviceName, urlList);
    FilterConfig filterConfig = EasyMock.createNiceMock(FilterConfig.class);
    ServletContext servletContext = EasyMock.createNiceMock(ServletContext.class);

    EasyMock.expect(filterConfig.getServletContext()).andReturn(servletContext).anyTimes();
    EasyMock.expect(servletContext.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME))
            .andReturn(provider).anyTimes();

    BasicHttpParams params = new BasicHttpParams();

    HttpUriRequest outboundRequest = EasyMock.createNiceMock(HttpRequestBase.class);
    EasyMock.expect(outboundRequest.getMethod()).andReturn("GET").anyTimes();
    EasyMock.expect(outboundRequest.getURI()).andReturn(uri1).anyTimes();
    EasyMock.expect(outboundRequest.getParams()).andReturn(params).anyTimes();

    HttpServletRequest inboundRequest = EasyMock.createNiceMock(HttpServletRequest.class);
    EasyMock.expect(inboundRequest.getRequestURL()).andReturn(new StringBuffer(uri2.toString())).once();
    EasyMock.expect(inboundRequest.getAttribute("dispatch.ha.failover.counter")).andReturn(new AtomicInteger(0))
            .once();
    EasyMock.expect(inboundRequest.getAttribute("dispatch.ha.failover.counter")).andReturn(new AtomicInteger(1))
            .once();

    HttpServletResponse outboundResponse = EasyMock.createNiceMock(HttpServletResponse.class);
    EasyMock.expect(outboundResponse.getOutputStream()).andAnswer(new IAnswer<ServletOutputStream>() {
        @Override
        public ServletOutputStream answer() throws Throwable {
            return new ServletOutputStream() {
                @Override
                public void write(int b) throws IOException {
                    throw new IOException("unreachable-host");
                }

                @Override
                public void setWriteListener(WriteListener arg0) {
                }

                @Override
                public boolean isReady() {
                    return false;
                }
            };
        }
    }).once();
    EasyMock.replay(filterConfig, servletContext, outboundRequest, inboundRequest, outboundResponse);
    Assert.assertEquals(uri1.toString(), provider.getActiveURL(serviceName));
    RMHaDispatch dispatch = new RMHaDispatch();
    dispatch.setHttpClient(new DefaultHttpClient());
    dispatch.setHaProvider(provider);
    dispatch.init();
    long startTime = System.currentTimeMillis();
    try {
        dispatch.executeRequest(outboundRequest, inboundRequest, outboundResponse);
    } catch (IOException e) {
        //this is expected after the failover limit is reached
    }
    long elapsedTime = System.currentTimeMillis() - startTime;
    Assert.assertEquals(uri2.toString(), provider.getActiveURL(serviceName));
    //test to make sure the sleep took place
    Assert.assertTrue(elapsedTime > 1000);
}

From source file:com.indeed.lsmtree.recordcache.PersistentRecordCache.java

/**
 * Use {@link com.indeed.lsmtree.recordcache.PersistentRecordCache.Builder#build()} to create instances.
 *
 * @param index                 lsm tree
 * @param recordLogDirectory    record log directory
 * @param checkpointDir         checkpoint directory
 * @throws IOException          thrown if an I/O error occurs
 */
private PersistentRecordCache(final Store<K, Long> index,
        final RecordLogDirectory<Operation> recordLogDirectory, final File checkpointDir) throws IOException {
    this.index = index;
    this.comparator = index.getComparator();
    this.recordLogDirectory = recordLogDirectory;
    indexUpdateFunctions = new RecordLogDirectoryPoller.Functions() {

        AtomicLong indexPutTime = new AtomicLong(0);

        AtomicLong indexDeleteTime = new AtomicLong(0);

        AtomicInteger indexPuts = new AtomicInteger(0);

        AtomicInteger indexDeletes = new AtomicInteger(0);

        AtomicInteger count = new AtomicInteger(0);

        @Override
        public void process(final long position, Operation op) throws IOException {

            count.incrementAndGet();
            if (count.get() % 1000 == 0) {
                final int puts = indexPuts.get();
                if (puts > 0)
                    log.debug("avg index put time: " + indexPutTime.get() / puts / 1000d + " us");
                final int deletes = indexDeletes.get();
                if (deletes > 0)
                    log.debug("avg index delete time: " + indexDeleteTime.get() / deletes / 1000d + " us");
            }

            if (op.getClass() == Put.class) {
                final Put<K, V> put = (Put) op;
                final long start = System.nanoTime();
                synchronized (index) {
                    index.put(put.getKey(), position);
                }
                indexPutTime.addAndGet(System.nanoTime() - start);
                indexPuts.incrementAndGet();
            } else if (op.getClass() == Delete.class) {
                final Delete<K> delete = (Delete) op;
                for (K k : delete.getKeys()) {
                    final long start = System.nanoTime();
                    synchronized (index) {
                        index.delete(k);
                    }
                    indexDeleteTime.addAndGet(System.nanoTime() - start);
                    indexDeletes.incrementAndGet();
                }
            } else if (op.getClass() == Checkpoint.class) {
                final Checkpoint checkpoint = (Checkpoint) op;
                if (checkpointDir != null) {
                    sync();
                    index.checkpoint(new File(checkpointDir, String.valueOf(checkpoint.getTimestamp())));
                }
            } else {
                log.warn("operation class unknown");
            }
        }

        @Override
        public void sync() throws IOException {
            final long start = System.nanoTime();
            index.sync();
            log.debug("sync time: " + (System.nanoTime() - start) / 1000d + " us");
        }
    };
}

From source file:com.adobe.acs.commons.workflow.process.impl.ReplicateWithOptionsWorkflowProcess.java

@Override
public void execute(WorkItem workItem, WorkflowSession workflowSession, MetaDataMap metaDataMap)
        throws WorkflowException {
    ResourceResolver resourceResolver = null;
    final long start = System.currentTimeMillis();

    try {
        resourceResolver = workflowHelper.getResourceResolver(workflowSession);
        final String originalPayload = (String) workItem.getWorkflowData().getPayload();
        final List<String> payloads = workflowPackageManager.getPaths(resourceResolver, originalPayload);
        final ProcessArgs processArgs = new ProcessArgs(metaDataMap);

        final AtomicInteger count = new AtomicInteger(0);

        // Anonymous inner class to facilitate counting of processed payloads
        final ResourceRunnable replicatorRunnable = new ResourceRunnable() {
            @Override
            public void run(final Resource resource) throws Exception {
                if (processArgs.isThrottle()) {
                    throttledTaskRunner.waitForLowCpuAndLowMemory();
                }
                replicator.replicate(resource.getResourceResolver().adaptTo(Session.class),
                        processArgs.getReplicationActionType(), resource.getPath(),
                        processArgs.getReplicationOptions(resource));
                count.incrementAndGet();
            }
        };

        final ContentVisitor visitor = new ContentVisitor(replicatorRunnable);

        for (final String payload : payloads) {
            final Resource resource = resourceResolver.getResource(payload);

            if (processArgs.isTraverseTree()) {
                // Traverse the tree
                visitor.accept(resource);
            } else {
                // Only execute on the provided payload
                replicatorRunnable.run(resource);
            }
        }

        log.info("Replicate with Options processed [ {} ] total payloads in {} ms", count.get(),
                System.currentTimeMillis() - start);
    } catch (Exception e) {
        throw new WorkflowException(e);
    }
}

From source file:com.svi.uzabase.logic.ValidationProcess.java

private List<XMLHolder> extractXML() {
    System.out.println("in extracting xml");
    BufferedReader brInput;
    String sCurrentLineInput;
    String[] fieldNo;
    String[] splitter;
    String toValidate;
    String str;
    List<XMLHolder> xmlBatchHolder = new ArrayList<>();
    XMLHolder xmlFileHolder;
    Field xmlField;
    org.jsoup.nodes.Document doc;
    progress = new AtomicInteger(0);
    total = new AtomicInteger(xmlHolder.size());
    mf.setJprogressValues(total, progress);
    for (String xmlPath : xmlHolder) {
        mf.loader("Extracting XML: ", false);
        xmlFileHolder = new XMLHolder();
        xmlFileHolder.setFileName(xmlPath);
        try {
            brInput = new BufferedReader(new FileReader(xmlPath));
            while ((sCurrentLineInput = brInput.readLine()) != null) {
                str = sCurrentLineInput;
                if (str.contains("field no=\"")) {
                    xmlField = new Field();
                    sCurrentLineInput = brInput.readLine();
                    fieldNo = str.split("\"");
                    str = sCurrentLineInput;
                    doc = Jsoup.parse(str, "", Parser.xmlParser());
                    toValidate = doc.select("value").text();
                    if (fieldNo.length < 1) {
                        xmlField.setFieldNo(0);
                    } else {
                        xmlField.setFieldNo(Integer.parseInt(fieldNo[1]));
                    }
                    xmlField.setValue(toValidate);
                    if (!toValidate.isEmpty()) {
                        xmlFileHolder.add(xmlField);
                    }
                }
            }
            brInput.close();
            xmlBatchHolder.add(xmlFileHolder);
        } catch (FileNotFoundException ex) {
            Logger.getLogger(ValidationProcess.class.getName()).log(Level.SEVERE, null, ex);
        } catch (IOException ex) {
            Logger.getLogger(ValidationProcess.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    //Set field name based on the schema
    for (XMLHolder h : xmlBatchHolder) {
        for (Field f : h) {
            for (SchemaFields s : schemaFieldsList) {
                if (f.getFieldNo() == s.getFieldNo()) {
                    f.setFieldName(s.getFieldName());
                    break;
                }
            }
        }
    }
    //Set field types for validation
    FS fs;
    for (XMLHolder h : xmlBatchHolder) {
        fs = new FS();
        fs.setFileName(h.getFileName());
        for (Field f : h) {
            if (f.getFieldName().toLowerCase().contains("nationality")) {
                f.setType("nationality");
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldName().toLowerCase().contains("company name")) {
                f.setType("corporation");
            } else if (f.getFieldName().toLowerCase().contains("position")) {
                f.setType("position");
            } else if (f.getFieldName().toLowerCase().contains("directors/officers")
                    && f.getFieldName().toLowerCase().contains("name")) {
                f.setType("name");
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldName().toLowerCase().contains("directors/officers")
                    && f.getFieldName().toLowerCase().contains("tin")) {
                f.setType("tin");
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldName().toLowerCase().contains("directors/officers")
                    && f.getFieldName().toLowerCase().contains("stockholder")) {
                f.setType("stockholder");
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldName().toLowerCase().contains("directors/officers")
                    && !f.getFieldName().toLowerCase().contains("how many")) {
                splitter = f.getFieldName().split(":", 2);
                if (splitter[1].trim().equals("Board") || splitter[1].trim().equals("Officer")) {
                    f.setType("board");
                } else {
                    f.setType("none");
                }
                f.setColumnHeader(f.getFieldName());
                //                } else if (f.getFieldName().equalsIgnoreCase("address")) {
            } else if (f.getFieldNo() == 31) {
                f.setType("city");
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldNo() == 32) {
                f.setType("province");
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldName().toLowerCase().contains("phone number")) {
                f.setType("tel");
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldName().toLowerCase().contains("fax number")) {
                f.setType("fax");
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldName().toLowerCase().contains("contact person")) {
                f.setType("person");
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldName().toLowerCase().contains("e-mail address")) {
                f.setType("email");
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldName().toLowerCase().contains("website/url address")) {
                f.setType("website");
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldName().toLowerCase().contains("tin/passport no.")) {
                f.setType("tin");
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldName().toLowerCase().contains("% ownership")) {
                f.setType("ownership");
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldName().toLowerCase().contains("share type")) {
                f.setType("shareType");
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldNo() == 1) {
                f.setType("sec");
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldNo() == 1207) {
                f.setType("tin");
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldNo() == 7) {
                if (!f.getValue().equals("*N/A")) {
                    fs.setTotalAssets(Double.parseDouble(f.getValue().replaceAll(",", "")));
                }
                f.setType("assets");
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldNo() == 9) {
                f.setType("liabilities");
                if (!f.getValue().equals("*N/A")) {
                    fs.setTotalLiabilities(Double.parseDouble(f.getValue().replaceAll(",", "")));
                }
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldNo() == 11) {
                f.setType("balanceSheet");
                if (!f.getValue().equals("*N/A")) {
                    fs.setTotalShareholderEquity(Double.parseDouble(f.getValue().replaceAll(",", "")));
                }
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldNo() == 3) {
                f.setType("grossc");
                if (!f.getValue().equals("*N/A")) {
                    fs.setGrossRevenue(Double.parseDouble(f.getValue().replaceAll(",", "")));
                }
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldNo() == 4) {
                f.setType("gross");
                if (!f.getValue().equals("*N/A")) {
                    fs.setGrossRevenueP(Double.parseDouble(f.getValue().replaceAll(",", "")));
                }
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldNo() == 5) {
                f.setType("netIncome");
                if (!f.getValue().equals("*N/A")) {
                    fs.setNetIncome(Double.parseDouble(f.getValue().replaceAll(",", "")));
                }
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldNo() == 6) {
                f.setType("netIncomeP");
                if (!f.getValue().equals("*N/A")) {
                    fs.setNetIncomeP(Double.parseDouble(f.getValue().replaceAll(",", "")));
                }
                f.setColumnHeader(f.getFieldName());
            } else if (f.getFieldNo() == 1201) {
                f.setType("purpose");
            } else if (f.getFieldNo() == 1206) {
                f.setType("periodCovered");
            } else if (f.getFieldNo() == 1209) {
                f.setType("fiscalYear");
            } else {
                f.setType("none");
            }
        }
        fsData.add(fs);
    }

    List<String> foundDupAlready = new ArrayList<>();
    String tempFoundDup = "";
    int dupCtr = 0;
    for (XMLHolder h : xmlBatchHolder) {
        for (Field f1 : h) {
            if (f1.getValue().equals("*N/A")) {
                //skip if value is *N/A
                continue;
            }
            for (Field f2 : h) {
                if (f2.getValue().equals("*N/A")) {
                    //skip if value is *N/A
                    continue;
                }
                if (f1.getValue().equals(f2.getValue()) && !f1.getFieldName().toLowerCase().contains("company")
                        && !f2.getFieldName().toLowerCase().contains("company")
                        && f1.getFieldName().toLowerCase().contains("name")
                        && f2.getFieldName().toLowerCase().contains("name")
                        //Check if board or stockholder only
                        && ((f1.getFieldName().toLowerCase().contains("directors/officers")
                                && f2.getFieldName().toLowerCase().contains("directors/officers"))
                                || ((f1.getFieldName().toLowerCase().contains("name")
                                        && !f1.getFieldName().toLowerCase().contains("directors/officers"))
                                        && (f2.getFieldName().toLowerCase().contains("name") && !f2
                                                .getFieldName().toLowerCase().contains("directors/officers"))))
                        //end of condition to check
                        && !f1.getValue().isEmpty() && !tempFoundDup.equals(f2.getValue())) {
                    dupCtr++;
                    if (dupCtr == 2 && foundDupAlready.indexOf(f1.getValue()) < 0) {
                        if (f2.getFieldName().toLowerCase().contains("former")
                                || f2.getFieldName().toLowerCase().contains("building")) {
                            continue;
                        }
                        System.out.println("2 " + f2.getValue() + f2.getFieldName());
                        foundDupAlready.add(f2.getValue());
                        tempFoundDup = f2.getValue();
                        f2.add("Duplicate entry");
                    }
                }
            }
            dupCtr = 0;
        }
    }
    List<Field> dupeHolder = new ArrayList<>();
    for (XMLHolder z : xmlBatchHolder) {
        for (Field f1 : z) {
            for (String s : f1) {
                if (s.equals("Duplicate entry")) {
                    dupeHolder.add(f1);
                }
            }
        }
    }
    for (XMLHolder z : xmlBatchHolder) {
        for (Field f1 : z) {
            for (Field fd : dupeHolder) {
                if (fd.getFieldNo() != f1.getFieldNo() && fd.getValue().equals(f1.getValue())) {
                    f1.add("Duplicate entry");
                }
            }
        }
    }

    return validateData(xmlBatchHolder);
}

From source file:com.sishuok.bigpipe.handler.BigpipeTaskReturnValueHandler.java

@Override
public void handleReturnValue(final Object returnValue, final MethodParameter returnType,
        final ModelAndViewContainer mavContainer, final NativeWebRequest webRequest) throws Exception {

    final BigPipeTask bigPipeTask = (BigPipeTask) returnValue;

    final HttpServletRequest request = webRequest.getNativeRequest(HttpServletRequest.class);
    final HttpServletResponse response = webRequest.getNativeResponse(HttpServletResponse.class);

    final DeferredResult<Void> deferredResult = new DeferredResult<Void>();

    mavContainer.setRequestHandled(true);

    WebAsyncUtils.getAsyncManager(request).startDeferredResultProcessing(deferredResult, mavContainer);

    final BigPipeContext context = new BigPipeContext(request.getContextPath(), bigPipeTask.getModel());

    // find and render the frame pagelet first
    final String framePageletName = bigPipeTask.getFramePageletName();
    final Pagelet framePagelet = pageletFinder.find(framePageletName);
    Assert.notNull(framePagelet, framePageletName + " pagelet not exists");

    final BigPipeContext frameContext = context.copy();
    final PageletResult framePageletResult = framePagelet.run(frameContext, response);
    final PageletView framePageletView = pageletViewResolver.resolve(framePageletResult);
    framePageletView.render(frameContext, response);

    final AtomicInteger counter = new AtomicInteger(bigPipeTask.getPageletNames().size());
    // render the remaining pagelets asynchronously
    for (String otherPageletName : bigPipeTask.getPageletNames()) {
        final Pagelet pagelet = pageletFinder.find(otherPageletName);
        Assert.notNull(pagelet, otherPageletName + " pagelet not exists");

        // dispatch each pagelet render to the executor
        executor.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    final BigPipeContext pageletContext = context.copy();
                    final PageletResult pageletResult = pagelet.run(pageletContext, response);
                    final PageletView pageletView = pageletViewResolver.resolve(pageletResult);
                    pageletView.render(pageletContext, response);
                } catch (Exception e) {
                    e.printStackTrace();
                    //
                }
                if (counter.decrementAndGet() <= 0) {
                    deferredResult.setResult(null);
                }
            }
        });
    }
}
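
The handler above initializes the counter with the number of pagelets and uses decrementAndGet() to detect when the last asynchronous render has finished. A minimal countdown sketch of the same idea (the executor setup and task body are illustrative):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;

public class CountdownSketch {
    public static void main(String[] args) {
        int tasks = 3;
        ExecutorService executor = Executors.newFixedThreadPool(tasks);
        // Seed the counter with the number of outstanding tasks.
        AtomicInteger remaining = new AtomicInteger(tasks);

        for (int i = 0; i < tasks; i++) {
            executor.execute(() -> {
                // ... per-task work would go here ...
                if (remaining.decrementAndGet() <= 0) {
                    System.out.println("all tasks finished");
                }
            });
        }
        executor.shutdown();
    }
}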

From source file:com.ccc.crest.core.client.CrestClient.java

public CrestClient(CrestController controller, String crestUrl, String xmlUrl, String userAgent,
        ExecutorService executor) {
    log = LoggerFactory.getLogger(getClass());
    this.controller = controller;
    this.crestUrl = StrH.stripTrailingSeparator(crestUrl, '/');
    this.xmlUrl = StrH.stripTrailingSeparator(xmlUrl, '/');
    this.userAgent = userAgent;
    refreshQueue = new PriorityQueue<>();
    this.executor = executor;
    crestClients = new ArrayList<>();
    xmlClients = new ArrayList<>();
    //        crestClientIndex = new AtomicInteger(-1);
    xmlClientIndex = new AtomicInteger(-1);
    for (int i = 0; i < CrestMaxClients; i++) {
        RequestThrottle crestGeneralThrottle = new RequestThrottle(CrestGeneralMaxRequestsPerSecond);
        RequestThrottle xmlGeneralThrottle = new RequestThrottle(XmlGeneralMaxRequestsPerSecond);
        CloseableHttpClient client = HttpClients.custom().setUserAgent(userAgent).build();
        ClientElement clientElement = new ClientElement(client, crestGeneralThrottle, xmlGeneralThrottle);
        crestClients.add(clientElement);
    }
    for (int i = 0; i < CrestMaxClients; i++) {
        RequestThrottle xmlcrestGeneralThrottle = new RequestThrottle(CrestGeneralMaxRequestsPerSecond);
        RequestThrottle xmlGeneralThrottle = new RequestThrottle(XmlGeneralMaxRequestsPerSecond);
        CloseableHttpClient client = HttpClients.custom().setUserAgent(userAgent).build();
        ClientElement clientElement = new ClientElement(client, xmlGeneralThrottle, xmlGeneralThrottle);
        xmlClients.add(clientElement);
    }
}