List of usage examples for the java.util.concurrent.atomic.AtomicInteger constructor
public AtomicInteger(int initialValue)
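Before the project examples below, here is a minimal self-contained sketch of the constructor and the most common operations on the resulting object. Only JDK API is used; the wrapper class is illustrative:

import java.util.concurrent.atomic.AtomicInteger;

public class AtomicIntegerBasics {
    public static void main(String[] args) {
        AtomicInteger counter = new AtomicInteger(5); // starts at the given initial value
        System.out.println(counter.get());             // 5
        System.out.println(counter.getAndIncrement()); // prints 5, counter is now 6
        System.out.println(counter.incrementAndGet()); // prints 7
        counter.set(0);                                // plain write
        // compareAndSet updates the value only if it currently equals the expected one
        boolean swapped = counter.compareAndSet(0, 42);
        System.out.println(swapped + " " + counter.get()); // true 42
    }
}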
From source file:com.taobao.weex.devtools.inspector.protocol.module.DOM.java
public DOM(Document document) {
    mObjectMapper = new ObjectMapper();
    mDocument = Util.throwIfNull(document);
    mSearchResults = Collections.synchronizedMap(new HashMap<String, List<Integer>>());
    mResultCounter = new AtomicInteger(0);
    mPeerManager = new ChromePeerManager();
    mPeerManager.setListener(new PeerManagerListener());
    mListener = new DocumentUpdateListener();
}
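The constructor above initializes an `AtomicInteger(0)` field that later hands out unique search-result ids. That pattern reduces to a sketch like this (the class and method names here are illustrative, not from the weex-devtools source):

import java.util.concurrent.atomic.AtomicInteger;

class SearchResultRegistry {
    private final AtomicInteger mResultCounter = new AtomicInteger(0);

    // Each call returns a distinct id, even under concurrent access.
    String nextResultId() {
        return String.valueOf(mResultCounter.getAndIncrement());
    }
}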
From source file:com.amazonaws.services.kinesis.producer.KinesisProducerTest.java
@Test
public void differentCredsForRecordsAndMetrics() throws InterruptedException, ExecutionException {
    final String AKID_A = "AKIAAAAAAAAAAAAAAAAA";
    final String AKID_B = "AKIABBBBBBBBBBBBBBBB";

    final KinesisProducer kp = getProducer(
            new StaticCredentialsProvider(new BasicAWSCredentials(AKID_A, StringUtils.repeat("a", 40))),
            new StaticCredentialsProvider(new BasicAWSCredentials(AKID_B, StringUtils.repeat("b", 40))));

    final long start = System.nanoTime();
    while (System.nanoTime() - start < 500 * 1000000) {
        kp.addUserRecord("a", "a", ByteBuffer.wrap(new byte[0]));
        kp.flush();
        Thread.sleep(10);
    }
    kp.flushSync();
    kp.destroy();

    Map<String, AtomicInteger> counts = new HashMap<String, AtomicInteger>();
    counts.put(AKID_A, new AtomicInteger(0));
    counts.put(AKID_B, new AtomicInteger(0));

    for (ClientRequest cr : server.getRequests()) {
        String auth = cr.getHeaders().get("Authorization");
        if (auth == null) {
            auth = cr.getHeaders().get("authorization");
        }
        String host = cr.getHeaders().get("Host");
        if (host == null) {
            host = cr.getHeaders().get("host");
        }
        if (auth.contains(AKID_B)) {
            assertFalse(host.contains("kinesis"));
            counts.get(AKID_B).getAndIncrement();
        } else if (auth.contains(AKID_A)) {
            assertFalse(host.contains("monitoring"));
            counts.get(AKID_A).getAndIncrement();
        } else {
            fail("Expected AKID(s) not found in auth header");
        }
    }

    assertTrue(counts.get(AKID_A).get() > 1);
    assertTrue(counts.get(AKID_B).get() > 1);
}
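The test uses a `Map<String, AtomicInteger>` as a set of named counters, seeded with `new AtomicInteger(0)` per key. Since Java 8 the same idiom is often written with `computeIfAbsent`, roughly as follows (a sketch, not the AWS test code):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;

class RequestCounter {
    private final Map<String, AtomicInteger> counts = new ConcurrentHashMap<>();

    void record(String key) {
        // Creates the counter on first use, then increments it atomically.
        counts.computeIfAbsent(key, k -> new AtomicInteger(0)).getAndIncrement();
    }

    int get(String key) {
        AtomicInteger c = counts.get(key);
        return c == null ? 0 : c.get();
    }
}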
From source file:com.simiacryptus.mindseye.applications.ObjectLocationBase.java
/**
 * Run.
 *
 * @param log the log
 */
public void run(@Nonnull final NotebookOutput log) {
    //    @Nonnull String logName = "cuda_" + log.getName() + ".log";
    //    log.p(log.file((String) null, logName, "GPU Log"));
    //    CudaSystem.addLog(new PrintStream(log.file(logName)));

    ImageClassifierBase classifier = getClassifierNetwork();
    Layer classifyNetwork = classifier.getNetwork();

    ImageClassifierBase locator = getLocatorNetwork();
    Layer locatorNetwork = locator.getNetwork();
    ArtistryUtil.setPrecision((DAGNetwork) classifyNetwork, Precision.Float);
    ArtistryUtil.setPrecision((DAGNetwork) locatorNetwork, Precision.Float);

    Tensor[][] inputData = loadImages_library();
    //    Tensor[][] inputData = loadImage_Caltech101(log);
    double alphaPower = 0.8;

    final AtomicInteger index = new AtomicInteger(0);
    Arrays.stream(inputData).limit(10).forEach(row -> {
        log.h3("Image " + index.getAndIncrement());
        final Tensor img = row[0];
        log.p(log.image(img.toImage(), ""));
        Result classifyResult = classifyNetwork.eval(new MutableResult(row));
        Result locationResult = locatorNetwork.eval(new MutableResult(row));
        Tensor classification = classifyResult.getData().get(0);
        List<CharSequence> categories = classifier.getCategories();
        int[] sortedIndices = IntStream.range(0, categories.size()).mapToObj(x -> x)
                .sorted(Comparator.comparing(i -> -classification.get(i))).mapToInt(x -> x).limit(10).toArray();
        logger.info(Arrays.stream(sortedIndices)
                .mapToObj(i -> String.format("%s: %s = %s%%", i, categories.get(i), classification.get(i) * 100))
                .reduce((a, b) -> a + "\n" + b).orElse(""));
        LinkedHashMap<CharSequence, Tensor> vectors = new LinkedHashMap<>();
        List<CharSequence> predictionList = Arrays.stream(sortedIndices).mapToObj(categories::get)
                .collect(Collectors.toList());
        Arrays.stream(sortedIndices).limit(6).forEach(category -> {
            CharSequence name = categories.get(category);
            log.h3(name);
            Tensor alphaTensor = renderAlpha(alphaPower, img, locationResult, classification, category);
            log.p(log.image(img.toRgbImageAlphaMask(0, 1, 2, alphaTensor), ""));
            vectors.put(name, alphaTensor.unit());
        });

        Tensor avgDetection = vectors.values().stream().reduce((a, b) -> a.add(b)).get()
                .scale(1.0 / vectors.size());
        Array2DRowRealMatrix covarianceMatrix =
                new Array2DRowRealMatrix(predictionList.size(), predictionList.size());
        for (int x = 0; x < predictionList.size(); x++) {
            for (int y = 0; y < predictionList.size(); y++) {
                Tensor l = vectors.get(predictionList.get(x));
                Tensor r = vectors.get(predictionList.get(y));
                covarianceMatrix.setEntry(x, y,
                        null == l || null == r ? 0 : (l.minus(avgDetection)).dot(r.minus(avgDetection)));
            }
        }
        @Nonnull final EigenDecomposition decomposition = new EigenDecomposition(covarianceMatrix);

        for (int objectVector = 0; objectVector < 10; objectVector++) {
            log.h3("Eigenobject " + objectVector);
            double eigenvalue = decomposition.getRealEigenvalue(objectVector);
            RealVector eigenvector = decomposition.getEigenvector(objectVector);
            Tensor detectionRegion = IntStream.range(0, eigenvector.getDimension()).mapToObj(i -> {
                Tensor tensor = vectors.get(predictionList.get(i));
                return null == tensor ? null : tensor.scale(eigenvector.getEntry(i));
            }).filter(x -> null != x).reduce((a, b) -> a.add(b)).get();
            detectionRegion = detectionRegion.scale(255.0 / detectionRegion.rms());
            CharSequence categorization = IntStream.range(0, eigenvector.getDimension()).mapToObj(i -> {
                CharSequence category = predictionList.get(i);
                double component = eigenvector.getEntry(i);
                return String.format("<li>%s = %.4f</li>", category, component);
            }).reduce((a, b) -> a + "" + b).get();
            log.p(String.format("Object Detected: <ol>%s</ol>", categorization));
            log.p("Object Eigenvalue: " + eigenvalue);
            log.p("Object Region: " + log.image(img.toRgbImageAlphaMask(0, 1, 2, detectionRegion), ""));
            log.p("Object Region Complement: "
                    + log.image(img.toRgbImageAlphaMask(0, 1, 2, detectionRegion.scale(-1)), ""));
        }

        //      final int[] orderedVectors = IntStream.range(0, 10).mapToObj(x -> x)
        //          .sorted(Comparator.comparing(x -> -decomposition.getRealEigenvalue(x))).mapToInt(x -> x).toArray();
        //      IntStream.range(0, orderedVectors.length)
        //          .mapToObj(i -> {
        //              //double realEigenvalue = decomposition.getRealEigenvalue(orderedVectors[i]);
        //              return decomposition.getEigenvector(orderedVectors[i]).toArray();
        //            }
        //          ).toArray(i -> new double[i][]);

        log.p(String.format(
                "<table><tr><th>Cosine Distance</th>%s</tr>%s</table>",
                Arrays.stream(sortedIndices).limit(10)
                        .mapToObj(col -> "<th>" + categories.get(col) + "</th>").reduce((a, b) -> a + b).get(),
                Arrays.stream(sortedIndices).limit(10).mapToObj(r -> {
                    return String.format("<tr><td>%s</td>%s</tr>", categories.get(r),
                            Arrays.stream(sortedIndices).limit(10).mapToObj(col -> {
                                Tensor l = vectors.get(categories.get(r));
                                Tensor r2 = vectors.get(categories.get(col));
                                return String.format("<td>%.4f</td>",
                                        (null == l || null == r2) ? 0 : Math.acos(l.dot(r2)));
                            }).reduce((a, b) -> a + b).get());
                }).reduce((a, b) -> a + b).orElse("")));
    });

    log.setFrontMatterProperty("status", "OK");
}
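The only role of `AtomicInteger` in this long example is the `index` counter: Java lambdas may capture only effectively final locals, so a mutable `int` loop counter is replaced by a final `AtomicInteger` reference. A stripped-down sketch of that idiom:

import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Stream;

public class IndexedForEach {
    public static void main(String[] args) {
        // A plain int could not be mutated from inside the lambda;
        // the final AtomicInteger reference can.
        final AtomicInteger index = new AtomicInteger(0);
        Stream.of("a", "b", "c")
              .forEach(item -> System.out.println("Image " + index.getAndIncrement() + ": " + item));
    }
}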
From source file:org.apache.hadoop.gateway.ha.dispatch.DefaultHaDispatchTest.java
@Test
public void testConnectivityFailover() throws Exception {
    String serviceName = "OOZIE";
    HaDescriptor descriptor = HaDescriptorFactory.createDescriptor();
    descriptor.addServiceConfig(
            HaDescriptorFactory.createServiceConfig(serviceName, "true", "1", "1000", "2", "1000", null, null));
    HaProvider provider = new DefaultHaProvider(descriptor);
    URI uri1 = new URI("http://unreachable-host");
    URI uri2 = new URI("http://reachable-host");
    ArrayList<String> urlList = new ArrayList<String>();
    urlList.add(uri1.toString());
    urlList.add(uri2.toString());
    provider.addHaService(serviceName, urlList);
    FilterConfig filterConfig = EasyMock.createNiceMock(FilterConfig.class);
    ServletContext servletContext = EasyMock.createNiceMock(ServletContext.class);

    EasyMock.expect(filterConfig.getServletContext()).andReturn(servletContext).anyTimes();
    EasyMock.expect(servletContext.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME))
            .andReturn(provider).anyTimes();

    BasicHttpParams params = new BasicHttpParams();

    HttpUriRequest outboundRequest = EasyMock.createNiceMock(HttpRequestBase.class);
    EasyMock.expect(outboundRequest.getMethod()).andReturn("GET").anyTimes();
    EasyMock.expect(outboundRequest.getURI()).andReturn(uri1).anyTimes();
    EasyMock.expect(outboundRequest.getParams()).andReturn(params).anyTimes();

    HttpServletRequest inboundRequest = EasyMock.createNiceMock(HttpServletRequest.class);
    EasyMock.expect(inboundRequest.getRequestURL()).andReturn(new StringBuffer(uri2.toString())).once();
    EasyMock.expect(inboundRequest.getAttribute("dispatch.ha.failover.counter"))
            .andReturn(new AtomicInteger(0)).once();
    EasyMock.expect(inboundRequest.getAttribute("dispatch.ha.failover.counter"))
            .andReturn(new AtomicInteger(1)).once();

    HttpServletResponse outboundResponse = EasyMock.createNiceMock(HttpServletResponse.class);
    EasyMock.expect(outboundResponse.getOutputStream())
            .andAnswer(new IAnswer<SynchronousServletOutputStreamAdapter>() {
                @Override
                public SynchronousServletOutputStreamAdapter answer() throws Throwable {
                    return new SynchronousServletOutputStreamAdapter() {
                        @Override
                        public void write(int b) throws IOException {
                            throw new IOException("unreachable-host");
                        }
                    };
                }
            }).once();
    EasyMock.replay(filterConfig, servletContext, outboundRequest, inboundRequest, outboundResponse);

    Assert.assertEquals(uri1.toString(), provider.getActiveURL(serviceName));
    DefaultHaDispatch dispatch = new DefaultHaDispatch();
    dispatch.setHttpClient(new DefaultHttpClient());
    dispatch.setHaProvider(provider);
    dispatch.setServiceRole(serviceName);
    dispatch.init();
    long startTime = System.currentTimeMillis();
    try {
        dispatch.executeRequest(outboundRequest, inboundRequest, outboundResponse);
    } catch (IOException e) {
        // this is expected after the failover limit is reached
    }
    long elapsedTime = System.currentTimeMillis() - startTime;
    Assert.assertEquals(uri2.toString(), provider.getActiveURL(serviceName));
    // test to make sure the sleep took place
    Assert.assertTrue(elapsedTime > 1000);
}
From source file:org.apache.hadoop.gateway.hdfs.dispatch.WebHdfsHaDispatchTest.java
@Test
public void testConnectivityFailover() throws Exception {
    String serviceName = "WEBHDFS";
    HaDescriptor descriptor = HaDescriptorFactory.createDescriptor();
    descriptor.addServiceConfig(
            HaDescriptorFactory.createServiceConfig(serviceName, "true", "1", "1000", "2", "1000", null, null));
    HaProvider provider = new DefaultHaProvider(descriptor);
    URI uri1 = new URI("http://unreachable-host");
    URI uri2 = new URI("http://reachable-host");
    ArrayList<String> urlList = new ArrayList<String>();
    urlList.add(uri1.toString());
    urlList.add(uri2.toString());
    provider.addHaService(serviceName, urlList);
    FilterConfig filterConfig = EasyMock.createNiceMock(FilterConfig.class);
    ServletContext servletContext = EasyMock.createNiceMock(ServletContext.class);

    EasyMock.expect(filterConfig.getServletContext()).andReturn(servletContext).anyTimes();
    EasyMock.expect(servletContext.getAttribute(HaServletContextListener.PROVIDER_ATTRIBUTE_NAME))
            .andReturn(provider).anyTimes();

    BasicHttpParams params = new BasicHttpParams();

    HttpUriRequest outboundRequest = EasyMock.createNiceMock(HttpRequestBase.class);
    EasyMock.expect(outboundRequest.getMethod()).andReturn("GET").anyTimes();
    EasyMock.expect(outboundRequest.getURI()).andReturn(uri1).anyTimes();
    EasyMock.expect(outboundRequest.getParams()).andReturn(params).anyTimes();

    HttpServletRequest inboundRequest = EasyMock.createNiceMock(HttpServletRequest.class);
    EasyMock.expect(inboundRequest.getRequestURL()).andReturn(new StringBuffer(uri2.toString())).once();
    EasyMock.expect(inboundRequest.getAttribute("dispatch.ha.failover.counter"))
            .andReturn(new AtomicInteger(0)).once();
    EasyMock.expect(inboundRequest.getAttribute("dispatch.ha.failover.counter"))
            .andReturn(new AtomicInteger(1)).once();

    HttpServletResponse outboundResponse = EasyMock.createNiceMock(HttpServletResponse.class);
    EasyMock.expect(outboundResponse.getOutputStream())
            .andAnswer(new IAnswer<SynchronousServletOutputStreamAdapter>() {
                @Override
                public SynchronousServletOutputStreamAdapter answer() throws Throwable {
                    return new SynchronousServletOutputStreamAdapter() {
                        @Override
                        public void write(int b) throws IOException {
                            throw new IOException("unreachable-host");
                        }
                    };
                }
            }).once();
    EasyMock.replay(filterConfig, servletContext, outboundRequest, inboundRequest, outboundResponse);

    Assert.assertEquals(uri1.toString(), provider.getActiveURL(serviceName));
    WebHdfsHaDispatch dispatch = new WebHdfsHaDispatch();
    dispatch.setHttpClient(new DefaultHttpClient());
    dispatch.setHaProvider(provider);
    dispatch.init();
    long startTime = System.currentTimeMillis();
    try {
        dispatch.executeRequest(outboundRequest, inboundRequest, outboundResponse);
    } catch (IOException e) {
        // this is expected after the failover limit is reached
    }
    long elapsedTime = System.currentTimeMillis() - startTime;
    Assert.assertEquals(uri2.toString(), provider.getActiveURL(serviceName));
    // test to make sure the sleep took place
    Assert.assertTrue(elapsedTime > 1000);
}
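Both Knox tests above drive failover by handing the dispatch an `AtomicInteger` stored as a request attribute: each retry re-reads the counter and stops once the configured failover limit is exhausted, which is why the mock returns `new AtomicInteger(0)` first and `new AtomicInteger(1)` second. The essence of that per-request bookkeeping, sketched independently of the Knox classes (the class below is hypothetical; only the attribute name mirrors the test, and a Map stands in for the servlet request's attributes):

import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

class FailoverSketch {
    static final String FAILOVER_COUNTER_ATTRIBUTE = "dispatch.ha.failover.counter";
    static final int MAX_FAILOVER_ATTEMPTS = 1;

    // The counter rides along with the request, so each retry can see
    // how many failovers have already happened for this request.
    static boolean shouldFailover(Map<String, Object> requestAttributes) {
        AtomicInteger counter = (AtomicInteger) requestAttributes
                .computeIfAbsent(FAILOVER_COUNTER_ATTRIBUTE, k -> new AtomicInteger(0));
        return counter.incrementAndGet() <= MAX_FAILOVER_ATTEMPTS;
    }
}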
From source file:com.android.tools.swing.ui.NavigationComponent.java
private void updateText() {
    if (myItemStack.isEmpty()) {
        setText("");
        return;
    }

    if (myItemStack.size() == 1 && hasRootItem && !myDisplaySingleRoot) {
        setText("");
        return;
    }

    final AtomicInteger id = new AtomicInteger(myItemStack.size() - 1);
    String text = Joiner.on(" > ")
            .join(Iterators.transform(myItemStack.descendingIterator(), new Function<T, String>() {
                @Override
                public String apply(T input) {
                    // Do not display link for the last element.
                    if (id.get() == 0) {
                        return input.getDisplayText();
                    }
                    return String.format("<a href=\"%d\">%s</a>", id.getAndDecrement(), input.getDisplayText());
                }

                @Override
                public boolean equals(Object object) {
                    return false;
                }
            }));
    setText(text);
}
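Here the `AtomicInteger` counts down so the transform can treat the last element differently (no link). The same pattern without the Guava and Swing machinery, as a sketch (the item list and `breadcrumb` variable are made up):

import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

public class BreadcrumbSketch {
    public static void main(String[] args) {
        List<String> items = List.of("root", "folder", "file");
        final AtomicInteger id = new AtomicInteger(items.size() - 1);
        String breadcrumb = items.stream()
                .map(item -> {
                    int n = id.getAndDecrement();
                    return n == 0 ? item // last element: plain text, no link
                                  : String.format("<a href=\"%d\">%s</a>", n, item);
                })
                .collect(Collectors.joining(" > "));
        System.out.println(breadcrumb); // <a href="2">root</a> > <a href="1">folder</a> > file
    }
}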
From source file:com.cronutils.model.time.TimeNode.java
/**
 * Returns the reference value itself if it matches, or the nearest previous value if it does not.
 * Shifts are applied afterwards, so the same value is returned when no shift is requested.
 *
 * @param reference - reference value
 * @param shiftsToApply - shifts to apply
 * @return NearestValue instance, never null. Holds information on nearest (backward) value and shifts performed.
 */
@VisibleForTesting
NearestValue getNearestBackwardValue(int reference, int shiftsToApply) {
    List<Integer> values = new ArrayList<Integer>(this.values);
    Collections.reverse(values);
    int index = 0;
    boolean foundSmaller = false;
    AtomicInteger shift = new AtomicInteger(0);
    if (!values.contains(reference)) {
        for (Integer value : values) {
            if (value < reference) {
                index = values.indexOf(value);
                shiftsToApply--; // we just moved a position!
                foundSmaller = true;
                break;
            }
        }
        if (!foundSmaller) {
            shift.incrementAndGet();
        }
    } else {
        index = values.indexOf(reference);
    }
    int value = values.get(index);
    for (int j = 0; j < shiftsToApply; j++) {
        value = getValueFromList(values, index + 1, shift);
        index = values.indexOf(value);
    }
    return new NearestValue(value, shift.get());
}
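In this method `AtomicInteger` is not used for concurrency at all: it is a mutable box passed into `getValueFromList` so the helper can report back how many shifts it performed alongside its return value. A minimal sketch of that out-parameter idiom (the helper below is illustrative, not the cron-utils implementation):

import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

public class OutParameterSketch {
    // Returns the element at index, wrapping around the list;
    // every wrap is reported to the caller through the 'shift' box.
    static int getValueFromList(List<Integer> values, int index, AtomicInteger shift) {
        if (index >= values.size()) {
            shift.incrementAndGet(); // record the wrap for the caller
            return getValueFromList(values, index - values.size(), shift);
        }
        return values.get(index);
    }

    public static void main(String[] args) {
        AtomicInteger shift = new AtomicInteger(0);
        int value = getValueFromList(List.of(10, 20, 30), 4, shift);
        System.out.println(value + " with " + shift.get() + " wrap(s)"); // 20 with 1 wrap(s)
    }
}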
From source file:gr.aueb.cs.nlp.wordtagger.classifier.MetaClassifier.java
/**
 * Returns the Word with the proper feature vector created from the output of the input classifiers.
 *
 * @param inputWord the word to classify
 * @return the word with its combined feature vector
 */
private Word tagsToFeats(Word inputWord) {
    double[][] feats = new double[1][0];
    AtomicInteger i = new AtomicInteger(0);
    classifiers.forEach(v -> {
        feats[0] = ArrayUtils.addAll(feats[0], model.getCategoryAsOneOfAKDouble(v.classify(inputWord)));
        i.getAndAdd(totalCategories);
    });
    return new Word(inputWord.getValue(), inputWord.getCategory(),
            new FeatureVector(feats[0], model.getCategoryAsOneOfAKDouble(inputWord)));
}
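The `i.getAndAdd(totalCategories)` call above appears to keep a running offset as each classifier's fixed-width one-of-K block is appended. A self-contained sketch of concatenating fixed-width blocks with such an offset (all names below are illustrative, not from the wordtagger source):

import java.util.concurrent.atomic.AtomicInteger;

public class OffsetAccumulator {
    public static void main(String[] args) {
        int blockWidth = 3;
        double[][] blocks = { {1, 0, 0}, {0, 1, 0} };
        double[] feats = new double[blockWidth * blocks.length];
        AtomicInteger offset = new AtomicInteger(0);
        for (double[] block : blocks) {
            // getAndAdd returns the current offset, then advances it by one block width.
            System.arraycopy(block, 0, feats, offset.getAndAdd(blockWidth), blockWidth);
        }
        System.out.println(java.util.Arrays.toString(feats)); // [1.0, 0.0, 0.0, 0.0, 1.0, 0.0]
    }
}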
From source file:de.tudarmstadt.lt.seg.sentence.SentenceSplitterTest.java
@Test
public void ruleSplitterTest() {
    final AtomicInteger n = new AtomicInteger(0);
    ISentenceSplitter s = new RuleSplitter().initParam("default", false).init(TEST_TEXT);
    System.out.format("+++ %s +++ %n", s.getClass().getName());
    s.forEach(seg -> {
        if (seg.type == SegmentType.SENTENCE)
            n.incrementAndGet();
        System.out.println(seg);
    });
    System.out.println("+++");
    s.init(TokenizerTest.TEST_TEXT);
    s.forEach(seg -> {
        if (seg.type == SegmentType.SENTENCE)
            n.incrementAndGet();
        System.out.println(seg);
    });
    System.out.format("%d sentences.%n", n.get());
}
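The counter survives across both `forEach` passes, accumulating a total sentence count. A plain `filter(...).count()` would do when counting is the only goal; the `AtomicInteger` version is needed when the loop also has side effects, as here where every segment is printed. A reduced sketch:

import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

public class CountAndPrint {
    public static void main(String[] args) {
        final AtomicInteger n = new AtomicInteger(0);
        List<String> segments = List.of("SENTENCE: Hello.", "TOKEN: Hello", "SENTENCE: Bye.");
        segments.forEach(seg -> {
            if (seg.startsWith("SENTENCE")) n.incrementAndGet(); // count while printing
            System.out.println(seg);
        });
        System.out.format("%d sentences.%n", n.get());
    }
}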
From source file:org.talend.dataprep.command.CommandHelper.java
/**
 * Return a Publisher of type T out of the hystrix command.
 *
 * @param clazz the wanted stream type.
 * @param mapper the object mapper used to parse objects.
 * @param command the hystrix command to deal with.
 * @param <T> the type of objects to stream.
 * @return a Publisher<T> out of the hystrix command response body.
 */
public static <T> Publisher<T> toPublisher(final Class<T> clazz, final ObjectMapper mapper,
        final HystrixCommand<InputStream> command) {
    AtomicInteger count = new AtomicInteger(0);
    return Flux.create(sink -> {
        final Observable<InputStream> observable = command.toObservable();
        observable.map(i -> {
            try {
                return mapper.readerFor(clazz).<T>readValues(i);
            } catch (IOException e) {
                throw new TDPException(CommonErrorCodes.UNEXPECTED_EXCEPTION, e);
            }
        }) //
                .doOnCompleted(() -> LOGGER.debug("Completed command '{}' (emits '{}') with '{}' records.",
                        command.getClass().getName(), clazz.getName(), count.get())) //
                .toBlocking() //
                .forEach(s -> {
                    while (s.hasNext()) {
                        sink.next(s.next());
                        count.incrementAndGet();
                    }
                    sink.complete();
                });
    }, FluxSink.OverflowStrategy.BUFFER);
}
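`count` is captured by two different lambdas, the `doOnCompleted` logger and the `forEach` body, so it must be a shared mutable holder rather than a local `int`. A minimal Reactor-only sketch of counting emitted items for a completion log (uses `reactor.core.publisher.Flux`, which the original also builds on; the pipeline itself is illustrative):

import java.util.concurrent.atomic.AtomicInteger;
import reactor.core.publisher.Flux;

public class CountingPublisher {
    public static void main(String[] args) {
        AtomicInteger count = new AtomicInteger(0);
        Flux.just("a", "b", "c")
            .doOnNext(x -> count.incrementAndGet())          // shared counter, visible to both lambdas
            .doOnComplete(() -> System.out.println("Completed with " + count.get() + " records."))
            .subscribe(System.out::println);
    }
}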