List of usage examples for java.util.concurrent.atomic.AtomicReference: the AtomicReference() constructor
public AtomicReference()
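The no-argument constructor creates a holder whose initial value is null. Before the project examples below, here is a minimal self-contained sketch of the basic operations it supports; the class name and values are illustrative only.

import java.util.concurrent.atomic.AtomicReference;

public class AtomicReferenceBasics {
    public static void main(String[] args) {
        // The no-arg constructor initializes the held reference to null.
        AtomicReference<String> ref = new AtomicReference<>();
        System.out.println(ref.get());                           // null

        // Plain volatile-style write.
        ref.set("first");

        // Atomic compare-and-set: succeeds only if the current value matches.
        boolean swapped = ref.compareAndSet("first", "second");
        System.out.println(swapped + " " + ref.get());           // true second

        // Fails because the current value is "second", not "first".
        System.out.println(ref.compareAndSet("first", "third")); // false
    }
}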
From source file: com.networknt.basicauth.BasicAuthHandlerTest.java

@Test
public void testInvalidBasicHeaderCredentialInfo() throws Exception {
    final Http2Client client = Http2Client.getInstance();
    final CountDownLatch latch = new CountDownLatch(1);
    final ClientConnection connection;
    try {
        connection = client.connect(new URI("http://localhost:17352"), Http2Client.WORKER, Http2Client.SSL,
                Http2Client.BUFFER_POOL, OptionMap.EMPTY).get();
    } catch (Exception e) {
        throw new ClientException(e);
    }
    final AtomicReference<ClientResponse> reference = new AtomicReference<>();
    try {
        ClientRequest request = new ClientRequest().setPath("/v2/pet").setMethod(Methods.GET);
        request.getRequestHeaders().put(Headers.HOST, "localhost");
        request.getRequestHeaders().put(Headers.AUTHORIZATION,
                "BASIC " + encodeCredentialsFullFormat("user1", "user1pass", "/"));
        connection.sendRequest(request, client.createClientCallback(reference, latch));
        latch.await();
    } catch (Exception e) {
        logger.error("Exception: ", e);
        throw new ClientException(e);
    } finally {
        IoUtils.safeClose(connection);
    }
    int statusCode = reference.get().getResponseCode();
    Assert.assertEquals(401, statusCode);
    if (statusCode == 401) {
        Status status = Config.getInstance().getMapper()
                .readValue(reference.get().getAttachment(Http2Client.RESPONSE_BODY), Status.class);
        Assert.assertNotNull(status);
        Assert.assertEquals("ERR10046", status.getCode());
    }
}
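The pairing above, an AtomicReference holder plus a CountDownLatch, is the standard way to hand an asynchronous result back to the calling thread. A minimal generic sketch of the same idea, with a hypothetical worker thread standing in for the HTTP client:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;

public class HolderAndLatch {
    public static void main(String[] args) throws InterruptedException {
        final AtomicReference<String> result = new AtomicReference<>();
        final CountDownLatch latch = new CountDownLatch(1);

        // A stand-in for any callback-driven API: the worker publishes its
        // result through the AtomicReference, then releases the latch.
        new Thread(() -> {
            result.set("response-body");
            latch.countDown();
        }).start();

        latch.await();                    // block until the callback has fired
        System.out.println(result.get()); // safe: set() happens-before await() returns
    }
}

The latch provides both the blocking and the memory-visibility guarantee; the AtomicReference simply carries the value across threads.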
From source file: hudson.plugins.jobConfigHistory.FileHistoryDaoTest.java

/**
 * Test of getRootDir method, of class FileHistoryDao.
 */
@Test
public void testGetRootDir() {
    AtomicReference<Calendar> timestampHolder = new AtomicReference<Calendar>();
    File result = sutWithoutUserAndDuplicateHistory.getRootDir(test1Config, timestampHolder);
    assertTrue(result.exists());
    assertThat(result.getPath(), containsString(
            "config-history" + File.separator + "jobs" + File.separator + "Test1"));
}
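Here the AtomicReference serves as an out-parameter: getRootDir returns a File but also needs to hand a timestamp back to the caller. A minimal sketch of that pattern, with hypothetical method and variable names:

import java.util.concurrent.atomic.AtomicReference;

public class OutParameter {
    // Java methods return one value; an AtomicReference argument lets the
    // callee pass back a second result without defining a wrapper type.
    static int divide(int a, int b, AtomicReference<Integer> remainder) {
        remainder.set(a % b);
        return a / b;
    }

    public static void main(String[] args) {
        AtomicReference<Integer> remainder = new AtomicReference<>();
        int quotient = divide(17, 5, remainder);
        System.out.println(quotient + " remainder " + remainder.get()); // 3 remainder 2
    }
}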
From source file: com.blacklocus.jres.request.index.JresUpdateDocumentScriptTest.java

@Test
public void testRetryOnConflict() throws InterruptedException {
    final String index = "JresUpdateDocumentScriptTest.testRetryOnConflict".toLowerCase();
    final String type = "test";
    final String id = "warzone";
    final AtomicInteger total = new AtomicInteger();
    final AtomicReference<String> error = new AtomicReference<String>();
    final Random random = new Random(System.currentTimeMillis());
    final int numThreads = 16, numIterations = 100;

    ExecutorService x = Executors.newFixedThreadPool(numThreads);
    for (int i = 0; i < numThreads; i++) {
        x.submit(new Runnable() {
            @Override
            public void run() {
                try {
                    for (int j = 0; j < numIterations; j++) {
                        int increment = random.nextInt(5);
                        total.addAndGet(increment);
                        JresUpdateDocumentScript req = new JresUpdateDocumentScript(index, type, id,
                                "ctx._source.value += increment", ImmutableMap.of("increment", increment),
                                ImmutableMap.of("value", increment), null);
                        req.setRetryOnConflict(numIterations * 10);
                        jres.quest(req);
                    }
                } catch (Exception e) {
                    error.set(e.getMessage());
                }
            }
        });
    }
    x.shutdown();
    x.awaitTermination(1, TimeUnit.MINUTES);
    Assert.assertNull("With so many retries, all of these should have gotten through without conflict error",
            error.get());

    jres.quest(new JresRefresh(index));
    JresGetDocumentReply getReply = jres.quest(new JresGetDocument(index, type, id));
    Map<String, Integer> doc = getReply.getSourceAsType(new TypeReference<Map<String, Integer>>() {
    });
    Assert.assertEquals("All increments should have gotten committed", (Object) total.get(), doc.get("value"));
    Assert.assertEquals("Should have been numThreads * numIterations versions committed",
            (Object) (numThreads * numIterations), getReply.getVersion());
}
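This test collects an error message from any of sixteen worker threads through one shared AtomicReference. Note that a plain set() lets a later failure overwrite an earlier one; when only the first failure matters, compareAndSet(null, t) preserves it. A minimal sketch of that variant, with hypothetical tasks:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

public class FirstErrorWins {
    public static void main(String[] args) throws InterruptedException {
        final AtomicReference<Throwable> firstError = new AtomicReference<>();
        ExecutorService pool = Executors.newFixedThreadPool(4);

        for (int i = 0; i < 4; i++) {
            final int taskId = i;
            pool.submit(() -> {
                try {
                    if (taskId % 2 == 0) {
                        throw new IllegalStateException("task " + taskId + " failed");
                    }
                } catch (Throwable t) {
                    // Only succeeds while the holder is still null, so the
                    // first recorded failure is never overwritten.
                    firstError.compareAndSet(null, t);
                }
            });
        }
        pool.shutdown();
        pool.awaitTermination(1, TimeUnit.MINUTES);
        System.out.println("first failure: " + firstError.get());
    }
}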
From source file: com.vmware.admiral.adapter.docker.service.SystemImageRetrievalManagerTest.java

@Test
public void testGetFromClassPath() throws Throwable {
    Path testXenonImagesPath = Files.createTempDirectory("test-xenon-images");

    HostInitCommonServiceConfig.startServices(host);
    waitForServiceAvailability(ConfigurationFactoryService.SELF_LINK);
    waitForServiceAvailability(UriUtils.buildUriPath(UriUtils
            .buildUriPath(ConfigurationFactoryService.SELF_LINK, FileUtil.USER_RESOURCES_PATH_VARIABLE)));

    // Set expected configuration
    ConfigurationState config = new ConfigurationState();
    config.documentSelfLink = UriUtils.buildUriPath(ConfigurationFactoryService.SELF_LINK,
            FileUtil.USER_RESOURCES_PATH_VARIABLE);
    config.key = FileUtil.USER_RESOURCES_PATH_VARIABLE;
    config.value = testXenonImagesPath.toAbsolutePath().toString();

    doPost(config, ConfigurationFactoryService.SELF_LINK);

    File imageDir = new File(UriUtils.buildUriPath(testXenonImagesPath.toString(),
            SystemImageRetrievalManager.SYSTEM_IMAGES_PATH));
    imageDir.mkdir();

    byte[] content = IOUtils
            .toByteArray(Thread.currentThread().getContextClassLoader().getResourceAsStream(TEST_IMAGE));

    AdapterRequest req = new AdapterRequest();
    req.resourceReference = host.getUri();

    AtomicReference<byte[]> retrievedImageRef = new AtomicReference<>();

    TestContext ctx = testCreate(1);
    retrievalManager.retrieveAgentImage(TEST_IMAGE, req, (image) -> {
        retrievedImageRef.set(image);
        ctx.completeIteration();
    });
    ctx.await();

    byte[] image = retrievedImageRef.get();
    Assert.assertEquals("Unexpected content", new String(content), new String(image));
}
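A lambda can only read local variables that are effectively final, so the callback above cannot assign to a plain local of the test method; the AtomicReference itself stays final while its contents change, which sidesteps the restriction and stays thread-safe. A minimal sketch of that workaround, with a hypothetical async API:

import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;

public class EffectivelyFinalWorkaround {
    // A stand-in for any API that delivers its result to a callback.
    static void fetchAsync(Consumer<String> callback) {
        callback.accept("payload");
    }

    public static void main(String[] args) {
        // String result; fetchAsync(r -> result = r);  // does not compile:
        // captured locals must be effectively final.
        AtomicReference<String> holder = new AtomicReference<>();
        fetchAsync(holder::set);   // the reference is final, its content is not
        System.out.println(holder.get());
    }
}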
From source file: com.bfd.harpc.monitor.RpcMonitor.java

/**
 * Collect statistics for a server node, both for the current reporting
 * interval and cumulatively.
 *
 * @param serverNode
 *            {@link ServerNode}
 * @param info
 *            {@link StatisticsInfo}
 */
public void collect(ServerNode serverNode, StatisticsInfo info) {
    // Per-interval statistics: lazily create the holder for this node,
    // using putIfAbsent so concurrent callers agree on a single instance.
    AtomicReference<StatisticsInfo> reference = statisticsMap.get(serverNode);
    if (reference == null) {
        statisticsMap.putIfAbsent(serverNode, new AtomicReference<StatisticsInfo>());
        reference = statisticsMap.get(serverNode);
    }
    // Merge the new numbers in with a CompareAndSet update.
    updateStatistics(info, reference);

    // Cumulative statistics: same lazy-init-then-update sequence.
    AtomicReference<StatisticsInfo> totalReference = totalStatisticsMap.get(serverNode);
    if (totalReference == null) {
        totalStatisticsMap.putIfAbsent(serverNode, new AtomicReference<StatisticsInfo>());
        totalReference = totalStatisticsMap.get(serverNode);
    }
    // CompareAndSet update, as above.
    updateStatistics(info, totalReference);
}
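The get/putIfAbsent/get sequence above guarantees a single shared AtomicReference per key even under contention, and updateStatistics (not shown) merges values with a compare-and-set, as the original comments hint. A minimal sketch of both halves, using computeIfAbsent as the more compact modern idiom and a hypothetical running-maximum merge:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicReference;

public class PerKeyStats {
    static final ConcurrentMap<String, AtomicReference<Long>> maxLatency = new ConcurrentHashMap<>();

    static void record(String node, long latencyMillis) {
        // One shared holder per key, created at most once even under contention.
        AtomicReference<Long> ref = maxLatency.computeIfAbsent(node, k -> new AtomicReference<>(0L));

        // Classic CAS retry loop: re-read, compute the merged value, and only
        // install it if no other thread won the race in the meantime.
        while (true) {
            Long current = ref.get();
            Long merged = Math.max(current, latencyMillis);
            if (ref.compareAndSet(current, merged)) {
                return;
            }
        }
    }

    public static void main(String[] args) {
        record("node-a", 12);
        record("node-a", 40);
        record("node-a", 7);
        System.out.println(maxLatency.get("node-a").get()); // 40
    }
}

Since Java 8 the retry loop can also be written as ref.accumulateAndGet(latencyMillis, Math::max), which wraps the same CAS loop internally.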
From source file: com.scb.edmhdpif.processors.RowIdDBSTest.java

@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final StopWatch stopWatch = new StopWatch(true);
    final char inputDelimiter = replaceDelimiter(
            context.getProperty(INPUT_DELIMITER).evaluateAttributeExpressions().getValue());
    final String geofile = context.getProperty(GeoFile).evaluateAttributeExpressions(flowFile).getValue();
    final String dlon = context.getProperty(DLon).evaluateAttributeExpressions(flowFile).getValue();
    final String dlat = context.getProperty(DLat).evaluateAttributeExpressions(flowFile).getValue();
    final String plon = context.getProperty(PLon).evaluateAttributeExpressions(flowFile).getValue();
    final String plat = context.getProperty(PLat).evaluateAttributeExpressions(flowFile).getValue();
    GeoUtil.setFilePath(geofile);
    try {
        AtomicReference<Long> totalRowCount = new AtomicReference<>();
        AtomicReference<String> fromcellvalue = new AtomicReference<>();
        AtomicReference<String> tocellvalue = new AtomicReference<>();
        AtomicReference<String> rowidvalue = new AtomicReference<>();
        AtomicReference<ArrayList<String>> provEvents = new AtomicReference<>();
        flowFile = session.write(flowFile, new StreamCallback() {
            @Override
            public void process(final InputStream rawIn, final OutputStream rawOut) throws IOException {
                try (final BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(rawOut));
                        final BufferedReader reader = new BufferedReader(new InputStreamReader(rawIn))) {
                    long rowcount = 0;
                    String fromcell = "";
                    String tocell = "";
                    String rowId = "";
                    String line = "";
                    ArrayList<String> rowIds = new ArrayList<>();
                    while ((line = reader.readLine()) != null) {
                        rowcount++;
                        rowId = Long.toString(rowcount) + "_"
                                + Long.toString(DateTime.now(DateTimeZone.UTC).getMillis()) + "_"
                                + UUID.randomUUID().toString();
                        double pickup_latitude = 0;
                        double pickup_longitude = 0;
                        double dropoff_latitude = 0;
                        double dropoff_longitude = 0;
                        try {
                            pickup_longitude = Double.valueOf(plon);
                            pickup_latitude = Double.valueOf(plat);
                        } catch (Exception e2) {
                            e2.printStackTrace();
                        }
                        try {
                            dropoff_longitude = Double.valueOf(dlon);
                            dropoff_latitude = Double.valueOf(dlat);
                        } catch (Exception e1) {
                            e1.printStackTrace();
                        }
                        try {
                            GeoUtil geo = GeoUtil.getInstance();
                            if (geo.contain(pickup_longitude, pickup_latitude)) {
                                fromcell = geo.getCellId();
                            } else {
                                fromcell = "OutLiner";
                            }
                            if (geo.contain(dropoff_longitude, dropoff_latitude)) {
                                tocell = geo.getCellId();
                            } else {
                                tocell = "OutLiner";
                            }
                        } catch (Exception j) {
                            j.printStackTrace();
                        }
                        writer.write(rowId + inputDelimiter + line + inputDelimiter + fromcell
                                + inputDelimiter + tocell);
                        writer.newLine();
                        rowIds.add(rowId);
                    }
                    provEvents.set(rowIds);
                    totalRowCount.set(rowcount);
                    fromcellvalue.set(fromcell);
                    tocellvalue.set(tocell);
                    rowidvalue.set(rowId);
                    writer.flush();
                }
            }
        });
        stopWatch.stop();
        flowFile = session.putAttribute(flowFile, TOTAL_ROW_COUNT, totalRowCount.get().toString());
        flowFile = session.putAttribute(flowFile, FROM_CELL, fromcellvalue.get().toString());
        flowFile = session.putAttribute(flowFile, TO_CELL, tocellvalue.get().toString());
        flowFile = session.putAttribute(flowFile, ROW_ID, rowidvalue.get().toString());
        flowFile = session.putAttribute(flowFile, ROW_ID_ADDED, "true");
        final String tableName = flowFile.getAttribute("tablename");
        for (final String rowId : provEvents.get()) {
            final String provEvent = rowId + "," + tableName;
            session.getProvenanceReporter().modifyContent(flowFile, provEvent);
        }
        session.getProvenanceReporter().modifyContent(flowFile, stopWatch.getDuration(TimeUnit.MILLISECONDS));
        session.transfer(flowFile, REL_SUCCESS);
    } catch (final ProcessException e) {
        session.transfer(flowFile, REL_FAILURE);
        throw e;
    }
}
From source file: com.spotify.heroic.HeroicCore.java

/**
 * Start the Heroic core, step by step.
 * <p>
 * It sets up the early injector which is responsible for loading all the necessary components
 * to parse a configuration file.
 * <p>
 * Load all the external modules, which are configured in {@link #modules}.
 * <p>
 * Load and build the configuration using the early injector.
 * <p>
 * Setup the primary injector which will provide the dependencies to the entire application.
 * <p>
 * Run all bootstraps that are configured in {@link #late}.
 * <p>
 * Start all the external modules. {@link #startLifeCycles}
 */
public HeroicCoreInstance newInstance() throws Exception {
    final CoreLoadingComponent loading = loadingInjector();

    loadModules(loading);

    final HeroicConfig config = config(loading);

    final CoreEarlyComponent early = earlyInjector(loading, config);
    runBootstrappers(early, this.early);

    // Initialize the instance injector with access to early components.
    final AtomicReference<CoreComponent> injector = new AtomicReference<>();

    final HeroicCoreInstance instance = new Instance(loading.async(), injector, early, config, this.late);

    final CoreComponent primary = primaryInjector(early, config, instance);

    primary.loadingLifeCycle().install();

    primary.internalLifeCycleRegistry().scoped("startup future").start(() -> {
        ((CoreHeroicContext) primary.context()).resolveCoreFuture();
        return primary.async().resolved(null);
    });

    // Update the instance injector, giving dynamic components initialized after this point
    // access to the primary injector.
    injector.set(primary);

    return instance;
}
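The AtomicReference here breaks a chicken-and-egg problem: the Instance needs a component that can only be built after the Instance exists, so it receives an initially empty holder that is filled in once construction completes. A minimal sketch of that late-binding pattern, with hypothetical class names:

import java.util.concurrent.atomic.AtomicReference;

public class LateBinding {
    // Needs an Engine, but the Engine can only be built once the Car exists.
    static class Car {
        private final AtomicReference<Engine> engine;
        Car(AtomicReference<Engine> engine) { this.engine = engine; }
        String drive() {
            Engine e = engine.get();
            return e == null ? "no engine yet" : e.start();
        }
    }

    static class Engine {
        private final Car owner;
        Engine(Car owner) { this.owner = owner; }
        String start() { return "vroom"; }
    }

    public static void main(String[] args) {
        AtomicReference<Engine> holder = new AtomicReference<>();
        Car car = new Car(holder);       // built first, with an empty holder
        System.out.println(car.drive()); // "no engine yet"
        holder.set(new Engine(car));     // cycle closed after construction
        System.out.println(car.drive()); // "vroom"
    }
}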
From source file: org.opennms.newts.gsod.ImportRunner.java

public void execute(String... args) throws Exception {
    CmdLineParser parser = new CmdLineParser(this);
    try {
        parser.parseArgument(args);
    } catch (CmdLineException e) {
        // handling of wrong arguments
        System.err.println(e.getMessage());
        parser.printUsage(System.err);
        return;
    }

    // Setup the slf4j metrics reporter
    MetricRegistry metrics = new MetricRegistry();
    final long start = System.currentTimeMillis();
    metrics.register("elapsed-seconds", new Gauge<Double>() {
        @Override
        public Double getValue() {
            return (System.currentTimeMillis() - start) / 1000.0;
        }
    });

    final ConsoleReporter reporter = ConsoleReporter.forRegistry(metrics).outputTo(System.err)
            .convertRatesTo(SECONDS).convertDurationsTo(MILLISECONDS).build();
    reporter.start(10, SECONDS);

    if (m_restUrl == null) {
        // we are using a direct importer so use a NewtsReporter for storing metrics
        NewtsReporter newtsReporter = NewtsReporter.forRegistry(metrics).name("importer")
                .convertRatesTo(SECONDS).convertDurationsTo(MILLISECONDS).build(repository());
        newtsReporter.start(1, SECONDS);
    }

    LOG.debug("Scanning {} for GSOD data files...", m_source);

    // walk the files in the directory given
    Observable<Sample> samples = fileTreeWalker(m_source.toPath()).subscribeOn(Schedulers.io())
            // set up a meter for each file processed
            .map(meter(metrics.meter("files"), Path.class))
            // report file
            .map(reportFile())
            // read all the files and convert them into lines
            .mergeMap(lines())
            // excluding the header lines
            .filter(exclude("YEARMODA"))
            // turn each line into a list of samples
            .mergeMap(samples())
            // adjust time on samples according to arguments
            .map(adjustTime())
            // meter the samples
            .map(meter(metrics.meter("samples"), Sample.class));

    Observable<List<Sample>> batches = samples
            // create batches each second or of size m_samplesPerBatch whichever comes first
            .buffer(m_samplesPerBatch);

    Observable<Boolean> doImport = m_restUrl != null ? restPoster(batches, metrics)
            : directPoster(batches, metrics);

    System.err.println("doImport = " + doImport);

    // GO!!!
    final AtomicReference<Subscription> subscription = new AtomicReference<>();
    final AtomicBoolean failed = new AtomicBoolean(false);
    final CountDownLatch latch = new CountDownLatch(1);

    Subscription s = doImport.subscribe(new Observer<Boolean>() {
        @Override
        public void onCompleted() {
            System.err.println("Finished Importing Everything!");
            reporter.report();
            latch.countDown();
            System.exit(0);
        }

        @Override
        public void onError(Throwable e) {
            failed.set(true);
            System.err.println("Error importing!");
            e.printStackTrace();
            try {
                //latch.await();
                Subscription s = subscription.get();
                if (s != null) s.unsubscribe();
            } catch (Exception ex) {
                System.err.println("Failed to close httpClient!");
                ex.printStackTrace();
            } finally {
                //dumpThreads();
            }
        }

        @Override
        public void onNext(Boolean t) {
            System.err.println("Received a boolean: " + t);
        }
    });

    subscription.set(s);
    if (failed.get()) {
        s.unsubscribe();
    }

    //latch.countDown();
    System.err.println("Return from Subscribe!");
    latch.await();
    //dumpThreads();
}
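The AtomicReference<Subscription> above solves an ordering problem: the Observer is constructed before subscribe() returns the Subscription it may need to cancel, so the error callback reads the handle from a holder that is filled in afterwards. A minimal sketch of that self-cancelling pattern, with a hypothetical handle type in place of the RxJava Subscription:

import java.util.concurrent.atomic.AtomicReference;

public class SelfCancelling {
    interface Handle { void cancel(); }

    static Runnable callback;

    // A stand-in for subscribe(): stores the callback, returns a cancel handle.
    static Handle register(Runnable onEvent) {
        callback = onEvent;
        return () -> System.out.println("cancelled");
    }

    public static void main(String[] args) {
        // The callback is built before register() returns the handle it needs,
        // so it reaches the handle through a holder filled in afterwards.
        AtomicReference<Handle> handle = new AtomicReference<>();
        handle.set(register(() -> {
            Handle h = handle.get();
            if (h != null) {   // null only if the event fires before set()
                h.cancel();
            }
        }));
        callback.run();        // simulate an event arriving: prints "cancelled"
    }
}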
From source file: com.netflix.curator.framework.recipes.cache.TestPathChildrenCache.java

@Test
public void testDeleteThenCreate() throws Exception {
    CuratorFramework client = CuratorFrameworkFactory.newClient(server.getConnectString(), new RetryOneTime(1));
    client.start();
    try {
        client.create().forPath("/test");
        client.create().forPath("/test/foo", "one".getBytes());

        final AtomicReference<Throwable> error = new AtomicReference<Throwable>();
        client.getUnhandledErrorListenable().addListener(new UnhandledErrorListener() {
            @Override
            public void unhandledError(String message, Throwable e) {
                error.set(e);
            }
        });

        final CountDownLatch removedLatch = new CountDownLatch(1);
        final CountDownLatch postRemovedLatch = new CountDownLatch(1);
        final CountDownLatch dataLatch = new CountDownLatch(1);
        PathChildrenCache cache = new PathChildrenCache(client, "/test", true);
        cache.getListenable().addListener(new PathChildrenCacheListener() {
            @Override
            public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception {
                if (event.getType() == PathChildrenCacheEvent.Type.CHILD_REMOVED) {
                    removedLatch.countDown();
                    Assert.assertTrue(postRemovedLatch.await(10, TimeUnit.SECONDS));
                } else {
                    try {
                        Assert.assertEquals(event.getData().getData(), "two".getBytes());
                    } finally {
                        dataLatch.countDown();
                    }
                }
            }
        });
        cache.start(PathChildrenCache.StartMode.BUILD_INITIAL_CACHE);

        client.delete().forPath("/test/foo");
        Assert.assertTrue(removedLatch.await(10, TimeUnit.SECONDS));
        client.create().forPath("/test/foo", "two".getBytes());
        postRemovedLatch.countDown();
        Assert.assertTrue(dataLatch.await(10, TimeUnit.SECONDS));

        Throwable t = error.get();
        if (t != null) {
            Assert.fail("Assert", t);
        }

        cache.close();
    } finally {
        client.close();
    }
}
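An assertion that fails inside a listener fires on the listener's thread, not the test runner's, so the test would still pass; the AtomicReference<Throwable> ferries the failure back to the test thread, where the final check can fail the test. A minimal sketch of that bridge, with a hypothetical listener thread:

import java.util.concurrent.atomic.AtomicReference;

public class ListenerFailureBridge {
    public static void main(String[] args) throws InterruptedException {
        AtomicReference<Throwable> error = new AtomicReference<>();

        Thread listener = new Thread(() -> {
            try {
                // An AssertionError thrown here would kill only this thread;
                // the main thread would never see it.
                if (!"expected".equals("actual")) {
                    throw new AssertionError("listener saw the wrong value");
                }
            } catch (Throwable t) {
                error.set(t);   // ferry the failure to the main thread
            }
        });
        listener.start();
        listener.join();

        // Back on the main thread: rethrow so the test (or caller) fails.
        if (error.get() != null) {
            throw new AssertionError(error.get());
        }
    }
}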
From source file: com.wk.lodge.composite.web.tomcat.IntegrationCompositeTests.java

@Test
public void testJoin() throws Exception {
    final CountDownLatch latch = new CountDownLatch(1);
    final AtomicReference<Throwable> failure = new AtomicReference<Throwable>();

    URI uri = new URI("ws://localhost:" + port + "/composite");
    WebSocketStompClient stompClient = new WebSocketStompClient(uri, this.headers, sockJsClient);
    stompClient.setMessageConverter(new MappingJackson2MessageConverter());

    stompClient.connect(new StompMessageHandler() {

        private StompSession stompSession;

        @Override
        public void afterConnected(StompSession stompSession, StompHeaderAccessor headers) {
            this.stompSession = stompSession;
            this.stompSession.subscribe("/user/queue/device", null);
            try {
                JoinMessage join = new JoinMessage();
                Device d = new Device();
                d.setUuid(UUID.randomUUID());
                join.setDevice(d);
                join.setGeo(new float[] { lat, lon });
                join.setType(JoinMessage.Types.exit);
                join.setPoint(new int[] { 0, 0 });
                join.setVector(new float[] { 1, 1 });
                stompSession.send("/app/join", join);
            } catch (Throwable t) {
                failure.set(t);
                latch.countDown();
            }
        }

        @Override
        public void handleMessage(Message<byte[]> message) {
            try {
                String json = parseMessageJson(message);
                new JsonPathExpectationsHelper("devices").exists(json);
                new JsonPathExpectationsHelper("devices").assertValueIsArray(json);
                new JsonPathExpectationsHelper("type").assertValue(json, "join");
            } catch (Throwable t) {
                failure.set(t);
            } finally {
                this.stompSession.disconnect();
                latch.countDown();
            }
        }

        @Override
        public void handleError(Message<byte[]> message) {
            StompHeaderAccessor accessor = StompHeaderAccessor.wrap(message);
            String error = "[Producer] " + accessor.getShortLogMessage(message.getPayload());
            logger.error(error);
            failure.set(new Exception(error));
        }

        @Override
        public void handleReceipt(String receiptId) {
        }

        @Override
        public void afterDisconnected() {
        }
    });

    if (!latch.await(10, TimeUnit.SECONDS)) {
        fail("Join response not received");
    } else if (failure.get() != null) {
        throw new AssertionError("", failure.get());
    }
}