Example usage for java.util.concurrent.atomic AtomicReference set

List of usage examples for java.util.concurrent.atomic AtomicReference set

Introduction

On this page you can find example usages of java.util.concurrent.atomic AtomicReference set.

Prototype

public final void set(V newValue) 

Document

Sets the value to newValue, with memory effects as specified by VarHandle#setVolatile.
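
Before the project examples below, here is a minimal, self-contained sketch (hypothetical class and variable names, not taken from any of the projects listed on this page) of the basic publish-and-read pattern they all build on: one thread calls set, another observes the value via get with volatile visibility guarantees.

import java.util.concurrent.atomic.AtomicReference;

public class AtomicReferenceSetExample {

    public static void main(String[] args) throws InterruptedException {
        // Start with an initial value; set(...) replaces it unconditionally.
        final AtomicReference<String> status = new AtomicReference<>("PENDING");

        Thread worker = new Thread(new Runnable() {
            @Override
            public void run() {
                // set(...) writes with volatile memory effects, so the value
                // published here is visible to the main thread after join().
                status.set("DONE");
            }
        });

        worker.start();
        worker.join();

        System.out.println(status.get()); // prints "DONE"
    }
}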

Usage

From source file:io.restassured.module.mockmvc.ContentTypeTest.java

@Test
public void adds_specific_charset_to_content_type_by_default() {
    final AtomicReference<String> contentType = new AtomicReference<String>();

    RestAssuredMockMvc.given().standaloneSetup(new GreetingController())
            .config(RestAssuredMockMvc.config()
                    .encoderConfig(EncoderConfig.encoderConfig().defaultCharsetForContentType(
                            StandardCharsets.UTF_16.toString(), ContentType.JSON)))
            .contentType(ContentType.JSON).interceptor(new MockHttpServletRequestBuilderInterceptor() {
                public void intercept(MockHttpServletRequestBuilder requestBuilder) {
                    MultiValueMap<String, Object> headers = Whitebox.getInternalState(requestBuilder,
                            "headers");
                    contentType.set(String.valueOf(headers.getFirst("Content-Type")));
                }
            }).when().get("/greeting?name={name}", "Johan").then().statusCode(200);

    assertThat(contentType.get()).isEqualTo("application/json;charset=" + StandardCharsets.UTF_16.toString());
    assertThat(contentType.get())
            .doesNotContain(RestAssuredMockMvc.config().getEncoderConfig().defaultContentCharset());
}

From source file:com.twitter.aurora.scheduler.app.SchedulerIT.java

@Test
public void testLaunch() throws Exception {
    expect(driverFactory.apply(null)).andReturn(driver).anyTimes();

    ScheduledTask snapshotTask = makeTask("snapshotTask", ScheduleStatus.ASSIGNED);
    ScheduledTask transactionTask = makeTask("transactionTask", ScheduleStatus.RUNNING);
    Iterable<Entry> recoveredEntries = toEntries(
            LogEntry.snapshot(new Snapshot().setTasks(ImmutableSet.of(snapshotTask))),
            LogEntry.transaction(new Transaction(
                    ImmutableList.of(Op.saveTasks(new SaveTasks(ImmutableSet.of(transactionTask)))),
                    storageConstants.CURRENT_SCHEMA_VERSION)));

    expect(log.open()).andReturn(logStream);
    expect(logStream.readAll()).andReturn(recoveredEntries.iterator()).anyTimes();
    // An empty saveTasks is an artifact of the fact that mutateTasks always writes a log operation
    // even if nothing is changed.
    streamMatcher.expectTransaction(Op.saveTasks(new SaveTasks(ImmutableSet.<ScheduledTask>of())))
            .andReturn(nextPosition());
    streamMatcher.expectTransaction(Op.saveFrameworkId(new SaveFrameworkId(FRAMEWORK_ID)))
            .andReturn(nextPosition());

    logStream.close();
    expectLastCall().anyTimes();

    final AtomicReference<Scheduler> scheduler = Atomics.newReference();
    expect(driver.start()).andAnswer(new IAnswer<Status>() {
        @Override
        public Status answer() {
            scheduler.get().registered(driver, FrameworkID.newBuilder().setValue(FRAMEWORK_ID).build(),
                    MasterInfo.getDefaultInstance());
            return Status.DRIVER_RUNNING;
        }
    });

    control.replay();
    startScheduler();

    scheduler.set(getScheduler());
    awaitSchedulerReady();

    assertEquals(0L, Stats.<Long>getVariable("task_store_PENDING").read().longValue());
    assertEquals(1L, Stats.<Long>getVariable("task_store_ASSIGNED").read().longValue());
    assertEquals(1L, Stats.<Long>getVariable("task_store_RUNNING").read().longValue());
    assertEquals(0L, Stats.<Long>getVariable("task_store_UNKNOWN").read().longValue());

    // TODO(William Farner): Send a thrift RPC to the scheduler.
    // TODO(William Farner): Also send an admin thrift RPC to verify capability (e.g. ROOT) mapping.
}

From source file:de.codesourcery.jasm16.parser.InstructionNodeTest.java

public void testParseNestedImmediateExpression() throws Exception {

    final String source = "SET I, 4+5*3";

    final ICompilationUnit unit = CompilationUnit.createInstance("string input", source);

    final IParseContext context = createParseContext(source);

    final ASTNode result = new InstructionNode().parse(context);
    assertFalse(getErrors(source, result), result.hasErrors());
    assertEquals(InstructionNode.class, result.getClass());

    final InstructionNode instruction = (InstructionNode) result;

    final AtomicReference<byte[]> objcode = new AtomicReference<byte[]>();

    final IObjectCodeWriter writer = new IObjectCodeWriter() {

        @Override
        public void close() throws IOException {
        }

        @Override
        public void writeObjectCode(byte[] data, int offset, int length) throws IOException {
            objcode.set(ArrayUtils.subarray(data, offset, offset + length));
        }

        @Override
        public void writeObjectCode(byte[] data) throws IOException {
            writeObjectCode(data, 0, data.length);
        }

        @Override
        public void deleteOutput() throws IOException {
            // TODO Auto-generated method stub

        }

        @Override
        public Address getCurrentWriteOffset() {
            return Address.ZERO;
        }

        @Override
        public Address getFirstWriteOffset() {
            return Address.ZERO;
        }

        @Override
        public void advanceToWriteOffset(Address offset) throws IOException {
            throw new UnsupportedOperationException("Not implemented");
        }
    };

    final ICompilationContext compContext = createCompilationContext(unit);

    final OperandNode operand = instruction.getOperand(1);
    final TermNode oldExpression = (TermNode) operand.child(0);
    final TermNode newExpression = oldExpression.reduce(compContext);
    if (newExpression != oldExpression) {
        operand.setChild(0, newExpression);
    }
    instruction.symbolsResolved(compContext);
    instruction.writeObjectCode(writer, compContext);
    assertNotNull(objcode.get());
    assertEquals(source, toSourceCode(result, source));
}

From source file:it.ecubecenter.processors.sentiment.SentimentAnalyzer.java

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog log = getLogger();
    final AtomicReference<String> atomicStringToAnalyze = new AtomicReference<>();

    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    String attributeToBeUsed = context.getProperty(ATTRIBUTE_TO_ANALYZE_PROPERTY).getValue();
    if (attributeToBeUsed == null || attributeToBeUsed.equals("")) {
        attributeToBeUsed = "";
        log.info("Start reading the flow file content in order to perform the sentiment analysis.");
        session.read(flowFile, new InputStreamCallback() {

            @Override
            public void process(InputStream in) throws IOException {
                atomicStringToAnalyze.set(IOUtils.toString(in));
            }
        });
    } else {
        log.info("Getting the content of attribute " + attributeToBeUsed
                + "in order to perform the sentiment analysis.");
        atomicStringToAnalyze.set(flowFile.getAttribute(attributeToBeUsed));
    }
    String stringToAnalyze = atomicStringToAnalyze.get();
    if (stringToAnalyze == null || stringToAnalyze.equals("")) {
        log.warn("The attribute to be analyzed doesn't exist or it is empty.");
        session.transfer(flowFile, FAILURE_RELATIONSHIP);
        return;
    }

    SentimentModel model = SentimentModel.getInstance();

    List<double[]> sentiments = model.getSentencesSentiment(stringToAnalyze);
    flowFile = session.putAttribute(flowFile, attributeToBeUsed + ".sentiment.category",
            SentimentModel.getOverallSentiment(sentiments));
    flowFile = session.putAttribute(flowFile, attributeToBeUsed + ".sentiment.sentences.scores",
            stringifyListOfSentiments(sentiments));

    session.transfer(flowFile, SUCCESS_RELATIONSHIP);
}

From source file:io.fabric8.maven.core.service.openshift.OpenshiftBuildService.java

private Watcher<Build> getBuildWatcher(final CountDownLatch latch, final String buildName,
        final AtomicReference<Build> buildHolder) {
    return new Watcher<Build>() {

        String lastStatus = "";

        @Override
        public void eventReceived(Action action, Build build) {
            buildHolder.set(build);
            String status = KubernetesResourceUtil.getBuildStatusPhase(build);
            log.verbose("BuildWatch: Received event %s , build status: %s", action, build.getStatus());
            if (!lastStatus.equals(status)) {
                lastStatus = status;
                log.verbose("Build %s status: %s", buildName, status);
            }
            if (Builds.isFinished(status)) {
                latch.countDown();
            }
        }

        @Override
        public void onClose(KubernetesClientException cause) {
            if (cause != null) {
                log.error("Error while watching for build to finish: %s [%d]", cause.getMessage(),
                        cause.getCode());
                Status status = cause.getStatus();
                if (status != null) {
                    log.error("%s [%s]", status.getReason(), status.getStatus());
                }
            }
            latch.countDown();
        }
    };
}

From source file:com.netflix.curator.framework.recipes.cache.TestPathChildrenCache.java

@Test
public void testDeleteThenCreate() throws Exception {
    CuratorFramework client = CuratorFrameworkFactory.newClient(server.getConnectString(), new RetryOneTime(1));
    client.start();
    try {
        client.create().forPath("/test");
        client.create().forPath("/test/foo", "one".getBytes());

        final AtomicReference<Throwable> error = new AtomicReference<Throwable>();
        client.getUnhandledErrorListenable().addListener(new UnhandledErrorListener() {
            @Override
            public void unhandledError(String message, Throwable e) {
                error.set(e);
            }
        });

        final CountDownLatch removedLatch = new CountDownLatch(1);
        final CountDownLatch postRemovedLatch = new CountDownLatch(1);
        final CountDownLatch dataLatch = new CountDownLatch(1);
        PathChildrenCache cache = new PathChildrenCache(client, "/test", true);
        cache.getListenable().addListener(new PathChildrenCacheListener() {
            @Override
            public void childEvent(CuratorFramework client, PathChildrenCacheEvent event) throws Exception {
                if (event.getType() == PathChildrenCacheEvent.Type.CHILD_REMOVED) {
                    removedLatch.countDown();
                    Assert.assertTrue(postRemovedLatch.await(10, TimeUnit.SECONDS));
                } else {
                    try {
                        Assert.assertEquals(event.getData().getData(), "two".getBytes());
                    } finally {
                        dataLatch.countDown();
                    }
                }
            }
        });
        cache.start(PathChildrenCache.StartMode.BUILD_INITIAL_CACHE);

        client.delete().forPath("/test/foo");
        Assert.assertTrue(removedLatch.await(10, TimeUnit.SECONDS));
        client.create().forPath("/test/foo", "two".getBytes());
        postRemovedLatch.countDown();
        Assert.assertTrue(dataLatch.await(10, TimeUnit.SECONDS));

        Throwable t = error.get();
        if (t != null) {
            Assert.fail("Assert", t);
        }

        cache.close();
    } finally {
        client.close();
    }
}

From source file:org.zodiark.subscriber.SubscriberTest.java

@Test(enabled = false)
public void createSessionTest() throws IOException, InterruptedException {
    final AtomicReference<SubscriberResults> answer = new AtomicReference<>();
    final ZodiarkClient publisherClient = new ZodiarkClient.Builder().path("http://127.0.0.1:" + port).build();
    final CountDownLatch latch = new CountDownLatch(1);

    publisherClient.handler(new OnEnvelopHandler() {
        @Override
        public boolean onEnvelop(Envelope e) throws IOException {
            answer.set(mapper.readValue(e.getMessage().getData(), SubscriberResults.class));
            latch.countDown();
            return true;
        }
    }).open();

    Envelope createSessionMessage = Envelope
            .newClientToServerRequest(new Message(new Path(Paths.DB_POST_SUBSCRIBER_SESSION_CREATE),
                    mapper.writeValueAsString(new UserPassword("foo", "bar"))));
    createSessionMessage.setFrom(new From(ActorValue.SUBSCRIBER));
    publisherClient.send(createSessionMessage);
    latch.await();
    assertEquals("OK", answer.get().getResults());
}

From source file:io.restassured.itest.java.FilterITest.java

@Test
public void content_type_in_filter_doesnt_contain_charset_if_configured_not_to() {
    final AtomicReference<String> contentType = new AtomicReference<String>();

    given().config(RestAssuredConfig.config()
            .encoderConfig(encoderConfig().appendDefaultContentCharsetToContentTypeIfUndefined(false)))
            .filter(new Filter() {
                public Response filter(FilterableRequestSpecification requestSpec,
                        FilterableResponseSpecification responseSpec, FilterContext ctx) {
                    contentType.set(requestSpec.getContentType());
                    return ctx.next(requestSpec, responseSpec);
                }
            }).formParam("firstName", "John").formParam("lastName", "Doe").when().post("/greet").then()
            .statusCode(200);

    assertThat(contentType.get(), equalTo("application/x-www-form-urlencoded"));
}

From source file:com.sysunite.nifi.XmlSplit.java

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {

    FlowFile flowFile = session.get();

    if (flowFile == null) {
        return;
    }

    //read the flow file and save its contents
    final AtomicReference<InputStream> theXml = new AtomicReference<>();

    session.read(flowFile, new InputStreamCallback() {

        @Override
        public void process(InputStream isIn) throws IOException {

            //System.out.println("contact!");

            try {

                String contents = IOUtils.toString(isIn);

                XML xmlNode = new XMLDocument(contents);

                InputStream is = new ByteArrayInputStream(xmlNode.toString().getBytes());

                theXml.set(is);

            } catch (IOException e) {
                System.out.println("w00t");// + e.getMessage());
            } catch (IllegalArgumentException e) {
                System.out.println("is xml niet geldig?");
            } catch (IndexOutOfBoundsException e) {
                System.out.println("bah! de node waar gezocht naar moet worden is niet gevonden!");
            }

        }
    });

    //fetch the xml content again

    try {

        InputStream orig_xml = theXml.get();

        String xml_contents = IOUtils.toString(orig_xml);

        try {

            //loop through the relations and get each value (xpath)
            final Map<Relationship, PropertyValue> propMap = propertyMap;

            for (final Map.Entry<Relationship, PropertyValue> entry : propMap.entrySet()) {

                final PropertyValue pv = entry.getValue();
                String xpath = pv.getValue();

                final Relationship rel = entry.getKey();
                String relName = rel.getName();

                if (xpath != null) {

                    System.out.println(xpath);

                    //create new xml
                    XML file = new XMLDocument(xml_contents);

                    //if we want an attribute of a node
                    //we recognize the at-sign (@) in the xpath, e.g. /Node/@id - Route On Attribute (original FlowFile content not changed)
                    if (xpath.matches("(.*)\u0040(.*)")) {
                        String v = file.xpath(xpath).get(0);
                        //System.out.println(v);

                        FlowFile fNew = session.clone(flowFile);
                        //create attribute
                        fNew = session.putAttribute(fNew, relName, v);
                        //transfer
                        session.transfer(fNew, rel);

                    } else {

                        //extract all nodes and transfer them to the appropriate relation - Route On Content (original FlowFile content changed)
                        for (XML ibr : file.nodes(xpath)) {

                            //              System.out.println("match!");
                            //              System.out.println(ibr.toString());

                            FlowFile fNew = session.clone(flowFile);
                            //create attribute
                            //fNew = session.putAttribute(fNew, relName, ibr.toString());

                            InputStream in = new ByteArrayInputStream(ibr.toString().getBytes());

                            fNew = session.importFrom(in, fNew);
                            //transfer
                            session.transfer(fNew, rel);

                        }

                    }

                }

            }

        } catch (IllegalArgumentException e) {
            System.out.println("is xml niet geldig?");
        } catch (IndexOutOfBoundsException e) {
            System.out.println("bah! de node waar gezocht naar moet worden is niet gevonden!");
        }

    } catch (IOException e) {
        System.out.println("cannot read xml");
    }

    session.transfer(flowFile, ORIGINAL);

}

From source file:org.apache.flink.streaming.connectors.kinesis.testutils.KinesisEventsGeneratorProducerThread.java

public static Thread create(final int totalEventCount, final int parallelism, final String awsAccessKey,
        final String awsSecretKey, final String awsRegion, final String kinesisStreamName,
        final AtomicReference<Throwable> errorHandler, final int flinkPort, final Configuration flinkConfig) {
    Runnable kinesisEventsGeneratorProducer = new Runnable() {
        @Override
        public void run() {
            try {
                StreamExecutionEnvironment see = StreamExecutionEnvironment.createRemoteEnvironment("localhost",
                        flinkPort, flinkConfig);
                see.setParallelism(parallelism);

                // start data generator
                DataStream<String> simpleStringStream = see
                        .addSource(new KinesisEventsGeneratorProducerThread.EventsGenerator(totalEventCount))
                        .setParallelism(1);

                Properties producerProps = new Properties();
                producerProps.setProperty(AWSConfigConstants.AWS_ACCESS_KEY_ID, awsAccessKey);
                producerProps.setProperty(AWSConfigConstants.AWS_SECRET_ACCESS_KEY, awsSecretKey);
                producerProps.setProperty(AWSConfigConstants.AWS_REGION, awsRegion);

                FlinkKinesisProducer<String> kinesis = new FlinkKinesisProducer<>(new SimpleStringSchema(),
                        producerProps);

                kinesis.setFailOnError(true);
                kinesis.setDefaultStream(kinesisStreamName);
                kinesis.setDefaultPartition("0");
                simpleStringStream.addSink(kinesis);

                LOG.info("Starting producing topology");
                see.execute("Producing topology");
                LOG.info("Producing topo finished");
            } catch (Exception e) {
                LOG.warn("Error while running producing topology", e);
                errorHandler.set(e);
            }
        }
    };

    return new Thread(kinesisEventsGeneratorProducer);
}