Example usage for java.util.concurrent.atomic AtomicReference getAndSet

List of usage examples for java.util.concurrent.atomic AtomicReference getAndSet

Introduction

On this page you can find example usage for java.util.concurrent.atomic AtomicReference getAndSet.

Prototype

@SuppressWarnings("unchecked")
public final V getAndSet(V newValue) 

Document

Atomically sets the value to newValue and returns the old value, with memory effects as specified by VarHandle#getAndSet.
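
A minimal, self-contained sketch of that contract (class and variable names here are illustrative, not taken from any of the sources below): the new value is stored and the previous value is returned, in one atomic step.

import java.util.concurrent.atomic.AtomicReference;

public class GetAndSetExample {
    public static void main(String[] args) {
        AtomicReference<String> ref = new AtomicReference<>("old");
        String previous = ref.getAndSet("new"); // atomically swaps in "new"
        System.out.println(previous);           // prints "old"
        System.out.println(ref.get());          // prints "new"
    }
}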

Usage

From source file:Main.java

static <T> T ThrsafeExchangePointer(AtomicReference<T> papDestination, T apExchange) {
    // Equivalent non-atomic pseudo-code:
    //   atomicptr apDestinationValue = *papDestination;
    //   *papDestination = apExchange;
    //   return apDestinationValue;
    return papDestination.getAndSet(apExchange);
}
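
A possible call site for this helper (illustrative, not taken from the original source): the returned value is whatever the reference held before the exchange.

AtomicReference<String> slot = new AtomicReference<>("first");
String previous = ThrsafeExchangePointer(slot, "second"); // previous == "first", slot now holds "second"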

From source file:com.sonyericsson.hudson.plugins.gerrit.trigger.mock.TestUtils.java

/**
 * Get the future build to start as a reference.
 *
 * @param event the event to monitor.
 * @return the reference to the future build to start.
 */
public static AtomicReference<Run> getFutureBuildToStart(GerritEventLifecycle event) {
    final AtomicReference<Run> reference = new AtomicReference<Run>();
    event.addListener(new GerritEventLifeCycleAdaptor() {
        @Override
        public void buildStarted(GerritEvent event, Run build) {
            reference.getAndSet(build);
        }
    });
    return reference;
}
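
In this listener the return value of getAndSet is discarded, so the call is used purely as an atomic write; reference.set(build) would behave the same here. The pattern is still convenient because the test thread can later read the captured build with reference.get().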

From source file:info.archinnov.achilles.test.integration.tests.AsyncBatchModeIT.java

@Test
public void should_reinit_batch_context_and_consistency_after_exception_async() throws Exception {
    Tweet tweet1 = TweetTestBuilder.tweet().randomId().content("simple_tweet1").buid();
    Tweet tweet2 = TweetTestBuilder.tweet().randomId().content("simple_tweet2").buid();

    final CountDownLatch latch = new CountDownLatch(1);
    final AtomicReference<Object> successSpy = new AtomicReference<>();
    FutureCallback<Object> successCallBack = new FutureCallback<Object>() {
        @Override
        public void onSuccess(Object result) {
            successSpy.getAndSet(result);
            latch.countDown();
        }

        @Override
        public void onFailure(Throwable t) {
            latch.countDown();
        }
    };

    asyncManager.insert(tweet1);

    // Start batch
    AsyncBatch batch = asyncManager.createBatch();

    batch.startBatch(EACH_QUORUM);
    batch.insert(tweet2);

    batch.asyncEndBatch(successCallBack);

    latch.await();

    assertThatBatchContextHasBeenReset(batch);

    logAsserter.prepareLogLevelForDriverConnection();
    batch.startBatch();
    batch.insert(tweet2);
    batch.asyncEndBatch();
    logAsserter.assertConsistencyLevels(ONE);

    assertThat(successSpy.get()).isEqualTo(Empty.INSTANCE);
}
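
The tests above all share the same capture pattern: a callback thread publishes its result into an AtomicReference "spy" and releases a CountDownLatch, and the test thread awaits the latch before asserting on the captured value. A minimal, self-contained sketch of that pattern (names are illustrative, not from the Achilles API):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;

public class CallbackSpyExample {
    public static void main(String[] args) throws InterruptedException {
        final CountDownLatch latch = new CountDownLatch(1);
        final AtomicReference<String> resultSpy = new AtomicReference<>();

        // Simulated asynchronous callback publishing its result.
        new Thread(() -> {
            resultSpy.getAndSet("done"); // the previous value (null) is ignored
            latch.countDown();           // wake the waiting test thread
        }).start();

        latch.await();                       // block until the callback has fired
        System.out.println(resultSpy.get()); // prints "done"
    }
}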

From source file:info.archinnov.achilles.test.integration.tests.AsyncBatchModeIT.java

@Test
public void should_batch_with_custom_consistency_level_async() throws Exception {
    Tweet tweet1 = TweetTestBuilder.tweet().randomId().content("simple_tweet1").buid();
    Tweet tweet2 = TweetTestBuilder.tweet().randomId().content("simple_tweet2").buid();
    Tweet tweet3 = TweetTestBuilder.tweet().randomId().content("simple_tweet3").buid();

    final CountDownLatch latch = new CountDownLatch(1);
    final AtomicReference<Object> successSpy = new AtomicReference<>();
    FutureCallback<Object> successCallBack = new FutureCallback<Object>() {
        @Override
        public void onSuccess(Object result) {
            successSpy.getAndSet(result);
            latch.countDown();
        }

        @Override
        public void onFailure(Throwable t) {
            latch.countDown();
        }
    };

    asyncManager.insert(tweet1).getImmediately();

    // Start batch
    AsyncBatch batch = asyncManager.createBatch();

    batch.startBatch(QUORUM);

    logAsserter.prepareLogLevelForDriverConnection();

    Tweet foundTweet1 = asyncManager.find(Tweet.class, tweet1.getId()).getImmediately();

    assertThat(foundTweet1.getContent()).isEqualTo(tweet1.getContent());

    batch.insert(tweet2);
    batch.insert(tweet3);

    batch.asyncEndBatch(successCallBack);

    latch.await();

    logAsserter.assertConsistencyLevels(QUORUM);
    assertThatBatchContextHasBeenReset(batch);

    assertThat(successSpy.get()).isSameAs(Empty.INSTANCE);
}

From source file:org.apache.rya.streams.client.command.AddQueryAndLoadStatementsStreamsIT.java

@Test
public void testLubm() throws Exception {
    // Arguments that add a query to Rya Streams.
    final String query = "PREFIX lubm: <" + LUBM_PREFIX + "> \n" + "SELECT * WHERE \n" + "{ \n"
            + "  ?graduateStudent a lubm:GraduateStudent . \n" + "  ?underGradUniversity a lubm:University . \n"
            + "  ?graduateStudent lubm:undergraduateDegreeFrom ?underGradUniversity . \n" + "}";

    final String query2 = "PREFIX lubm: <" + LUBM_PREFIX + "> \n" + "SELECT * WHERE \n" + "{ \n"
            + "  ?graduateStudent a lubm:GraduateStudent . \n" + "  ?underGradUniversity a lubm:University . \n"
            + "  ?graduateStudent lubm:undergraduateDegreeFrom ?underGradUniversity . \n" + "}";

    final String[] addArgs = new String[] { "--ryaInstance", "" + ryaInstance, "--kafkaHostname",
            kafka.getKafkaHostname(), "--kafkaPort", kafka.getKafkaPort(), "--query", query, "--isActive",
            "true", "--isInsert", "false" };

    final String[] addArgs2 = new String[] { "--ryaInstance", "" + ryaInstance, "--kafkaHostname",
            kafka.getKafkaHostname(), "--kafkaPort", kafka.getKafkaPort(), "--query", query2, "--isActive",
            "true", "--isInsert", "false" };

    // Execute the command.
    final AddQueryCommand command = new AddQueryCommand();
    command.execute(addArgs);
    // Add the same query twice to confirm that joins aren't being performed
    // across both queries.
    command.execute(addArgs2);

    // Show that the query was added to the Query Repository.
    final Set<StreamsQuery> queries = queryRepo.list();
    assertEquals(2, queries.size());
    final StreamsQuery streamsQuery = queries.iterator().next();
    final UUID queryId = streamsQuery.getQueryId();
    assertEquals(query, queries.iterator().next().getSparql());

    // Load a file of statements into Kafka.
    final String visibilities = "";
    final String[] loadArgs = new String[] { "--ryaInstance", "" + ryaInstance, "--kafkaHostname",
            kafka.getKafkaHostname(), "--kafkaPort", kafka.getKafkaPort(), "--statementsFile",
            LUBM_FILE.toString(), "--visibilities", visibilities };

    // Load the file of statements into the Statements topic.
    new LoadStatementsCommand().execute(loadArgs);

    final String statementsTopic = KafkaTopics.statementsTopic(ryaInstance);
    final String resultsTopic = KafkaTopics.queryResultsTopic(ryaInstance, queryId);

    final TopologyFactory factory = new TopologyFactory();
    final TopologyBuilder builder = factory.build(query, statementsTopic, resultsTopic,
            new RandomUUIDFactory());

    // Start the streams program.
    final Properties props = kafka.createBootstrapServerConfig();
    props.put(StreamsConfig.APPLICATION_ID_CONFIG, UUID.randomUUID().toString());
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

    final AtomicReference<String> errorMessage = new AtomicReference<>();
    final KafkaStreams streams = new KafkaStreams(builder, new StreamsConfig(props));
    streams.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
        @Override
        public void uncaughtException(final Thread thread, final Throwable throwable) {
            final String stackTrace = ExceptionUtils.getStackTrace(throwable);
            errorMessage.getAndSet("Kafka Streams threw an uncaught exception in thread (" + thread.getName()
                    + "): " + stackTrace);
        }
    });
    streams.cleanUp();
    try {
        streams.start();

        // Wait for the streams application to start. Streams only see data after their consumers are connected.
        Thread.sleep(6000);

        // Wait for the final results to appear in the output topic and verify the expected Binding Sets were found.
        try (Consumer<String, VisibilityBindingSet> consumer = KafkaTestUtil.fromStartConsumer(kafka,
                StringDeserializer.class, VisibilityBindingSetDeserializer.class)) {
            // Register the topic.
            consumer.subscribe(Arrays.asList(resultsTopic));

            // Poll for the result.
            final Set<VisibilityBindingSet> results = Sets.newHashSet(KafkaTestUtil.pollForResults(500,
                    2 * LUBM_EXPECTED_RESULTS_COUNT, LUBM_EXPECTED_RESULTS_COUNT, consumer));

            System.out.println("LUBM Query Results Count: " + results.size());
            // Show the correct binding sets results from the job.
            assertEquals(LUBM_EXPECTED_RESULTS_COUNT, results.size());
        }
    } finally {
        streams.close();
    }

    if (StringUtils.isNotBlank(errorMessage.get())) {
        fail(errorMessage.get());
    }
}

From source file:info.archinnov.achilles.test.integration.tests.AsyncBatchModeIT.java

@Test
public void should_batch_counters_async() throws Exception {
    // Start batch
    AsyncBatch batch = asyncManager.createBatch();
    batch.startBatch();

    CompleteBean entity = CompleteBeanTestBuilder.builder().randomId().name("name").buid();

    entity = batch.insert(entity);

    entity.setLabel("label");

    Tweet welcomeTweet = TweetTestBuilder.tweet().randomId().content("welcomeTweet").buid();
    entity.setWelcomeTweet(welcomeTweet);

    entity.getVersion().incr(10L);
    batch.update(entity);

    RegularStatement selectLabel = select("label").from("CompleteBean").where(eq("id", entity.getId()));
    Map<String, Object> result = asyncManager.nativeQuery(selectLabel).getFirst().getImmediately();
    assertThat(result).isNull();

    RegularStatement selectCounter = select("counter_value").from("achilles_counter_table")
            .where(eq("fqcn", CompleteBean.class.getCanonicalName()))
            .and(eq("primary_key", entity.getId().toString())).and(eq("property_name", "version"));

    result = asyncManager.nativeQuery(selectCounter).getFirst().getImmediately();

    assertThat(result).isNull();

    final CountDownLatch latch = new CountDownLatch(2);
    final AtomicReference<Object> successSpy = new AtomicReference<>();
    final AtomicReference<Throwable> exceptionSpy = new AtomicReference<>();

    FutureCallback<Object> successCallBack = new FutureCallback<Object>() {
        @Override
        public void onSuccess(Object result) {
            successSpy.getAndSet(result);
            latch.countDown();
        }

        @Override
        public void onFailure(Throwable t) {
            latch.countDown();
        }
    };

    FutureCallback<Object> errorCallBack = new FutureCallback<Object>() {
        @Override
        public void onSuccess(Object result) {
            latch.countDown();
        }

        @Override
        public void onFailure(Throwable t) {
            exceptionSpy.getAndSet(t);
            latch.countDown();
        }
    };

    // Flush
    batch.asyncEndBatch(successCallBack, errorCallBack);

    latch.await();

    Statement statement = new SimpleStatement("SELECT label from CompleteBean where id=" + entity.getId());
    Row row = asyncManager.getNativeSession().execute(statement).one();
    assertThat(row.getString("label")).isEqualTo("label");

    result = asyncManager.nativeQuery(selectCounter).getFirst().getImmediately();
    assertThat(result.get("counter_value")).isEqualTo(10L);
    assertThatBatchContextHasBeenReset(batch);

    assertThat(successSpy.get()).isNotNull().isSameAs(Empty.INSTANCE);
    assertThat(exceptionSpy.get()).isNull();
}

From source file:info.archinnov.achilles.test.integration.tests.AsyncBatchModeIT.java

@Test
public void should_batch_several_entities_async() throws Exception {
    CompleteBean bean = CompleteBeanTestBuilder.builder().randomId().name("name").buid();
    Tweet tweet1 = TweetTestBuilder.tweet().randomId().content("tweet1").buid();
    Tweet tweet2 = TweetTestBuilder.tweet().randomId().content("tweet2").buid();

    final CountDownLatch latch = new CountDownLatch(2);
    final AtomicReference<Object> successSpy = new AtomicReference<>();
    final AtomicReference<Throwable> exceptionSpy = new AtomicReference<>();

    FutureCallback<Object> successCallBack = new FutureCallback<Object>() {
        @Override
        public void onSuccess(Object result) {
            successSpy.getAndSet(result);
            latch.countDown();
        }

        @Override
        public void onFailure(Throwable t) {
            latch.countDown();
        }
    };

    FutureCallback<Object> errorCallBack = new FutureCallback<Object>() {
        @Override
        public void onSuccess(Object result) {
            latch.countDown();
        }

        @Override
        public void onFailure(Throwable t) {
            exceptionSpy.getAndSet(t);
            latch.countDown();
        }
    };

    // Start batch
    AsyncBatch batch = asyncManager.createBatch();
    batch.startBatch();

    batch.insert(bean);
    batch.insert(tweet1);
    batch.insert(tweet2);
    batch.insert(user);

    CompleteBean foundBean = asyncManager.find(CompleteBean.class, bean.getId()).getImmediately();
    Tweet foundTweet1 = asyncManager.find(Tweet.class, tweet1.getId()).getImmediately();
    Tweet foundTweet2 = asyncManager.find(Tweet.class, tweet2.getId()).getImmediately();
    User foundUser = asyncManager.find(User.class, user.getId()).getImmediately();

    assertThat(foundBean).isNull();
    assertThat(foundTweet1).isNull();
    assertThat(foundTweet2).isNull();
    assertThat(foundUser).isNull();

    // Flush
    batch.asyncEndBatch(successCallBack, errorCallBack);

    latch.await();

    final ResultSet resultSet = asyncManager.getNativeSession().execute(
            "SELECT id,favoriteTweets,followers,friends,age_in_years,name,welcomeTweet,label,preferences FROM CompleteBean WHERE id=:id",
            bean.getId());
    assertThat(resultSet.all()).hasSize(1);

    foundBean = asyncManager.find(CompleteBean.class, bean.getId()).getImmediately();
    foundTweet1 = asyncManager.find(Tweet.class, tweet1.getId()).getImmediately();
    foundTweet2 = asyncManager.find(Tweet.class, tweet2.getId()).getImmediately();
    foundUser = asyncManager.find(User.class, user.getId()).getImmediately();

    assertThat(foundBean.getName()).isEqualTo("name");
    assertThat(foundTweet1.getContent()).isEqualTo("tweet1");
    assertThat(foundTweet2.getContent()).isEqualTo("tweet2");
    assertThat(foundUser.getFirstname()).isEqualTo("fn");
    assertThat(foundUser.getLastname()).isEqualTo("ln");
    assertThatBatchContextHasBeenReset(batch);

    assertThat(successSpy.get()).isNotNull().isSameAs(Empty.INSTANCE);
    assertThat(exceptionSpy.get()).isNull();
}

From source file:org.apache.storm.cluster.StormClusterStateImpl.java

protected void issueCallback(AtomicReference<Runnable> cb) {
    Runnable callback = cb.getAndSet(null);
    if (callback != null)
        callback.run();
}
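
Here getAndSet(null) atomically claims the stored callback, so even if issueCallback is invoked concurrently from several threads the Runnable runs at most once; later callers simply observe null. A minimal sketch of the same claim-once idiom outside of Storm (illustrative names):

import java.util.concurrent.atomic.AtomicReference;

public class RunOnceExample {
    private final AtomicReference<Runnable> pending =
            new AtomicReference<>(() -> System.out.println("ran once"));

    public void fire() {
        // getAndSet(null) takes ownership of the callback atomically,
        // so concurrent callers see null and skip the run.
        Runnable callback = pending.getAndSet(null);
        if (callback != null) {
            callback.run();
        }
    }

    public static void main(String[] args) {
        RunOnceExample example = new RunOnceExample();
        example.fire(); // prints "ran once"
        example.fire(); // does nothing
    }
}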

From source file:com.netflix.curator.framework.recipes.queue.DistributedQueue.java

boolean internalPut(final T item, MultiItem<T> multiItem, String path, int maxWait, TimeUnit unit)
        throws Exception {
    if (!blockIfMaxed(maxWait, unit)) {
        return false;
    }

    final MultiItem<T> givenMultiItem = multiItem;
    if (item != null) {
        final AtomicReference<T> ref = new AtomicReference<T>(item);
        multiItem = new MultiItem<T>() {
            @Override
            public T nextItem() throws Exception {
                return ref.getAndSet(null);
            }
        };
    }

    putCount.incrementAndGet();
    byte[] bytes = ItemSerializer.serialize(multiItem, serializer);
    if (putInBackground) {
        doPutInBackground(item, path, givenMultiItem, bytes);
    } else {
        doPutInForeground(item, path, givenMultiItem, bytes);
    }
    return true;
}
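
Wrapping the single item in an AtomicReference and draining it with ref.getAndSet(null) makes the anonymous MultiItem hand the item out exactly once: the first call to nextItem() returns it, and any further call returns null, which the serializer presumably treats as the end of the item sequence.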