Example usage for java.util.concurrent.atomic AtomicBoolean set

List of usage examples for java.util.concurrent.atomic AtomicBoolean set

Introduction

On this page you can find example usages for java.util.concurrent.atomic AtomicBoolean set.

Prototype

public final void set(boolean newValue) 

Document

Sets the value to newValue, with memory effects as specified by VarHandle#setVolatile.
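
Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below; the class and field names are purely illustrative) showing how a set(boolean) call publishes a flag from one thread to another:

import java.util.concurrent.atomic.AtomicBoolean;

public class ShutdownFlagSketch {
    // Hypothetical flag shared between the main thread and a worker thread.
    private static final AtomicBoolean shutdownRequested = new AtomicBoolean(false);

    public static void main(String[] args) throws InterruptedException {
        Thread worker = new Thread(() -> {
            // Spin until the volatile write made by set(true) becomes visible.
            while (!shutdownRequested.get()) {
                Thread.onSpinWait();
            }
            System.out.println("Worker observed the shutdown flag.");
        });
        worker.start();

        Thread.sleep(100);
        // set(true) has volatile write semantics, so the worker's get()
        // is guaranteed to observe the new value.
        shutdownRequested.set(true);
        worker.join();
    }
}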

Usage

From source file:com.nridge.connector.common.con_com.crawl.CrawlQueue.java

/**
 * Evaluates if the phase has completed its processing cycle.  A phase
 * is considered complete if the application is no longer alive or the
 * queue item represents a crawl finish or abort marker.
 *
 * @param aPhase Name of the phase being evaluated (used for logging).
 * @param aQueueItem Queue item.
 *
 * @return <i>true</i> if the phase is complete, <i>false</i> otherwise.
 */
public boolean isPhaseComplete(String aPhase, String aQueueItem) {
    boolean isPhaseAlreadyComplete;
    Logger appLogger = mAppMgr.getLogger(this, "isPhaseComplete");

    appLogger.trace(mAppMgr.LOGMSG_TRACE_ENTER);

    AtomicBoolean atomicBoolean = mPhaseComplete.get(aPhase);
    if (atomicBoolean == null) {
        isPhaseAlreadyComplete = true;
        appLogger.error(String.format("Phase name '%s' atomic boolean is null.", aPhase));
    } else
        isPhaseAlreadyComplete = atomicBoolean.get();

    boolean appMgrIsAlive = mAppMgr.isAlive();
    boolean queueItemIsValid = isQueueItemValid(aQueueItem);
    boolean queueIsAborted = StringUtils.equals(aQueueItem, Connector.QUEUE_ITEM_CRAWL_ABORT);
    boolean queueIsFinished = StringUtils.equals(aQueueItem, Connector.QUEUE_ITEM_CRAWL_FINISH);

    boolean isComplete = ((!appMgrIsAlive) || (isPhaseAlreadyComplete) || (queueIsAborted)
            || (queueIsFinished));

    if (isComplete) {
        if ((atomicBoolean != null) && (!atomicBoolean.get()))
            atomicBoolean.set(true);
        appLogger.debug(String.format(
                "Phase Complete %s: queueItemIsValid = %s, isPhaseAlreadyComplete = %s, appMgrIsAlive = %s, queueIsAborted = %s, queueIsFinished = %s",
                aPhase, queueItemIsValid, isPhaseAlreadyComplete, appMgrIsAlive, queueIsAborted,
                queueIsFinished));
    } else
        appLogger.debug(String.format(
                "Phase Continue %s: queueItemIsValid = %s, isPhaseAlreadyComplete = %s, appMgrIsAlive = %s, queueIsAborted = %s, queueIsFinished = %s",
                aPhase, queueItemIsValid, isPhaseAlreadyComplete, appMgrIsAlive, queueIsAborted,
                queueIsFinished));

    appLogger.trace(mAppMgr.LOGMSG_TRACE_DEPART);

    return isComplete;
}

From source file:org.apache.tinkerpop.gremlin.groovy.jsr223.GremlinGroovyScriptEngineTest.java

@Test
public void shouldReloadClassLoaderWhileDoingEvalInSeparateThread() throws Exception {
    final AtomicBoolean fail = new AtomicBoolean(false);
    final AtomicInteger counter = new AtomicInteger(0);
    final CountDownLatch latch = new CountDownLatch(1);
    final AtomicReference<Color> color = new AtomicReference<>(Color.RED);

    final GremlinGroovyScriptEngine scriptEngine = new GremlinGroovyScriptEngine();

    try {
        scriptEngine.eval("Color.BLACK");
        fail("Should fail as class is not yet imported");
    } catch (ScriptException se) {
        // should get here as Color.BLACK is not imported yet.
        logger.info("Failed to execute Color.BLACK as expected.");
    }

    final Thread evalThread = new Thread(() -> {
        try {
            // execute scripts until the other thread releases this latch (i.e. after import)
            while (latch.getCount() == 1) {
                scriptEngine.eval("1+1");
                counter.incrementAndGet();
            }

            color.set((Color) scriptEngine.eval("Color.BLACK"));
        } catch (Exception se) {
            fail.set(true);
        }
    }, "test-reload-classloader-1");

    evalThread.start();

    // let the first thread execute a bit.
    Thread.sleep(1000);

    final Thread importThread = new Thread(() -> {
        logger.info("Importing java.awt.Color...");
        final Set<String> imports = new HashSet<String>() {
            {
                add("import java.awt.Color");
            }
        };
        scriptEngine.addImports(imports);
        latch.countDown();
    }, "test-reload-classloader-2");

    importThread.start();

    // block until both threads are done
    importThread.join();
    evalThread.join();

    assertEquals(Color.BLACK, color.get());
    assertThat(counter.get(), greaterThan(0));
    assertFalse(fail.get());
}

From source file:com.netflix.curator.TestSessionFailRetryLoop.java

@Test
public void testRetry() throws Exception {
    Timing timing = new Timing();
    final CuratorZookeeperClient client = new CuratorZookeeperClient(server.getConnectString(),
            timing.session(), timing.connection(), null, new RetryOneTime(1));
    SessionFailRetryLoop retryLoop = client.newSessionFailRetryLoop(SessionFailRetryLoop.Mode.RETRY);
    retryLoop.start();
    try {
        client.start();
        final AtomicBoolean secondWasDone = new AtomicBoolean(false);
        final AtomicBoolean firstTime = new AtomicBoolean(true);
        while (retryLoop.shouldContinue()) {
            try {
                RetryLoop.callWithRetry(client, new Callable<Void>() {
                    @Override
                    public Void call() throws Exception {
                        if (firstTime.compareAndSet(true, false)) {
                            Assert.assertNull(client.getZooKeeper().exists("/foo/bar", false));
                            KillSession.kill(client.getZooKeeper(), server.getConnectString());
                            client.getZooKeeper();
                            client.blockUntilConnectedOrTimedOut();
                        }

                        Assert.assertNull(client.getZooKeeper().exists("/foo/bar", false));
                        return null;
                    }
                });

                RetryLoop.callWithRetry(client, new Callable<Void>() {
                    @Override
                    public Void call() throws Exception {
                        Assert.assertFalse(firstTime.get());
                        Assert.assertNull(client.getZooKeeper().exists("/foo/bar", false));
                        secondWasDone.set(true);
                        return null;
                    }
                });
            } catch (Exception e) {
                retryLoop.takeException(e);
            }
        }

        Assert.assertTrue(secondWasDone.get());
    } finally {
        retryLoop.close();
        IOUtils.closeQuietly(client);
    }
}

From source file:com.qubole.quark.planner.parser.SqlQueryParser.java

public SqlQueryParserResult parse(String sql) throws SQLException {
    DataSourceSchema dataSource = this.context.getDefaultDataSource();
    final AtomicBoolean foundNonQuarkScan = new AtomicBoolean(false);
    final ImmutableSet.Builder<DataSourceSchema> dsBuilder = new ImmutableSet.Builder<>();
    try {
        final SqlKind kind = getSqlParser(sql).parseQuery().getKind();
        SqlQueryParserResult result = new SqlQueryParserResult(stripNamespace(sql, dataSource), dataSource,
                kind, null, false);
        RelNode relNode = parseInternal(sql);
        final RelVisitor relVisitor = new RelVisitor() {
            @Override
            public void visit(RelNode node, int ordinal, RelNode parent) {
                if (node instanceof QuarkViewScan) {
                    visitQuarkViewScan((QuarkViewScan) node);
                } else if (node instanceof QuarkTileScan) {
                    visitQuarkTileScan((QuarkTileScan) node);
                } else if (node instanceof TableScan) {
                    visitNonQuarkScan((TableScan) node);
                }
                super.visit(node, ordinal, parent);
            }

            private void visitNonQuarkScan(TableScan node) {
                foundNonQuarkScan.set(true);
                final String schemaName = node.getTable().getQualifiedName().get(0);
                CalciteSchema schema = CalciteSchema.from(getRootSchma()).getSubSchema(schemaName, false);
                dsBuilder.addAll(getDrivers(schema));
            }

            private void visitQuarkTileScan(QuarkTileScan node) {
                QuarkTile quarkTile = node.getQuarkTile();
                CalciteCatalogReader calciteCatalogReader = new CalciteCatalogReader(
                        CalciteSchema.from(getRootSchma()), false, context.getDefaultSchemaPath(),
                        getTypeFactory());
                CalciteSchema tileSchema = calciteCatalogReader.getTable(quarkTile.tableName)
                        .unwrap(CalciteSchema.class);
                dsBuilder.addAll(getDrivers(tileSchema));
            }

            private void visitQuarkViewScan(QuarkViewScan node) {
                QuarkTable table = node.getQuarkTable();
                if (table instanceof QuarkViewTable) {
                    final CalciteSchema tableSchema = ((QuarkViewTable) table).getBackupTableSchema();
                    dsBuilder.addAll(getDrivers(tableSchema));
                }
            }

            private ImmutableSet<DataSourceSchema> getDrivers(CalciteSchema tableSchema) {
                final ImmutableSet.Builder<DataSourceSchema> dsBuilder = new ImmutableSet.Builder<>();
                SchemaPlus tableSchemaPlus = tableSchema.plus();
                while (tableSchemaPlus != null) {
                    Schema schema = CalciteSchema.from(tableSchemaPlus).schema;
                    if (schema instanceof DataSourceSchema) {
                        dsBuilder.add((DataSourceSchema) schema);
                    }
                    tableSchemaPlus = tableSchemaPlus.getParentSchema();
                }
                return dsBuilder.build();
            }

        };

        relVisitor.go(relNode);

        ImmutableSet<DataSourceSchema> dataSources = dsBuilder.build();

        if (!foundNonQuarkScan.get() && dataSources.size() == 1) {
            /**
             * Check if query is completely optimized for a data source
             */
            final DataSourceSchema newDataSource = dataSources.asList().get(0);
            final SqlDialect dialect = newDataSource.getDataSource().getSqlDialect();
            final String parsedSql = getParsedSql(relNode, dialect);
            result = new SqlQueryParserResult(parsedSql, newDataSource, kind, relNode, true);
        } else if (foundNonQuarkScan.get() && dataSources.size() == 1) {
            /**
             * Check if it's not optimized
             */
            final DataSourceSchema newDataSource = dataSources.asList().get(0);
            final String stripNamespace = stripNamespace(sql, newDataSource);
            result = new SqlQueryParserResult(stripNamespace, newDataSource, kind, relNode, true);
        } else if (this.context.isUnitTestMode()) {
            String parsedSql = getParsedSql(relNode,
                    new SqlDialect(SqlDialect.DatabaseProduct.UNKNOWN, "UNKNOWN", null, true));
            result = new SqlQueryParserResult(parsedSql, null, kind, relNode, true);
        } else if (dataSources.size() > 1) {
            /**
             * Check if it's partially optimized, i.e., table scans of multiple data sources
             * are found in the RelNode. We currently do not support multiple data sources.
             */
            throw new SQLException("Federation between data sources is not allowed", "0A001");
        } else if (dataSources.isEmpty()) {
            throw new SQLException("No dataSource found for query", "3D001");
        }
        return result;
    } catch (SQLException e) {
        throw e;
    } catch (Exception e) {
        throw new SQLException(e);
    }
}

From source file:com.facebook.RequestTests.java

@LargeTest
public void testShareOpenGraphContentWithBadType() throws Exception {
    ShareOpenGraphObject ogObject = new ShareOpenGraphObject.Builder().putString("og:title", "a title")
            .putString("og:type", TEST_OG_OBJECT_TYPE).putString("og:description", "a description").build();

    ShareOpenGraphAction ogAction = new ShareOpenGraphAction.Builder()
            .setActionType(TEST_OG_ACTION_TYPE + "bad").putObject("test", ogObject).build();

    ShareOpenGraphContent content = new ShareOpenGraphContent.Builder().setAction(ogAction)
            .setPreviewPropertyName("test").build();

    final ShareApi shareApi = new ShareApi(content);
    final AtomicReference<String> actionId = new AtomicReference<>(null);
    final AtomicBoolean errorOccurred = new AtomicBoolean(false);

    getActivity().runOnUiThread(new Runnable() {
        @Override
        public void run() {
            shareApi.share(new FacebookCallback<Sharer.Result>() {
                @Override
                public void onSuccess(Sharer.Result result) {
                    actionId.set(result.getPostId());
                    notifyShareFinished();
                }

                @Override
                public void onCancel() {
                    notifyShareFinished();
                }

                @Override
                public void onError(FacebookException error) {
                    errorOccurred.set(true);
                    notifyShareFinished();
                }

                private void notifyShareFinished() {
                    synchronized (shareApi) {
                        shareApi.notifyAll();
                    }
                }
            });
        }
    });

    synchronized (shareApi) {
        shareApi.wait(REQUEST_TIMEOUT_MILLIS);
    }
    assertNull(actionId.get());
    assertTrue(errorOccurred.get());
}

From source file:nl.knaw.huc.di.tag.tagml.importer.TAGMLListener.java

private boolean nameContextIsValid(final ParserRuleContext ctx, final NameContext nameContext,
        final LayerInfoContext layerInfoContext) {
    AtomicBoolean valid = new AtomicBoolean(true);
    if (layerInfoContext != null) {
        layerInfoContext.layerName().stream().map(LayerNameContext::getText).forEach(lid -> {
            //            if (!document.getLayerNames().contains(lid)) {
            //              valid.set(false);
            //              errorListener.addError(
            //                  "%s Layer %s is undefined at this point.",
            //                  errorPrefix(ctx), lid);
            //            }
        });
    }

    if (nameContext == null || nameContext.getText().isEmpty()) {
        errorListener.addError("%s Nameless markup is not allowed here.", errorPrefix(ctx));
        valid.set(false);
    }
    return valid.get();
}

From source file:com.photon.maven.plugins.android.AbstractAndroidMojo.java

/**
 * Undeploys an apk, specified by package name, from a connected emulator or
 * usb device. Also deletes the application's data and cache directories on
 * the device.
 * 
 * @param packageName
 *            the package name to undeploy.
 * @return <code>true</code> if successfully undeployed, <code>false</code>
 *         otherwise.
 */
protected boolean undeployApk(final String packageName) throws MojoExecutionException, MojoFailureException {

    // If no devices are present, the undeploy counts as successful.
    final AtomicBoolean result = new AtomicBoolean(true);

    doWithDevices(new DeviceCallback() {
        @Override
        public void doWithDevice(final IDevice device) throws MojoExecutionException {
            try {
                device.uninstallPackage(packageName);
                getLog().info("Successfully uninstalled " + packageName + " from "
                        + DeviceHelper.getDescriptiveName(device));
                result.set(true);
            } catch (InstallException e) {
                result.set(false);
                throw new MojoExecutionException("Uninstall of " + packageName + "failed.", e);
            }
        }
    });

    return result.get();
}

From source file:fr.mby.saml2.sp.opensaml.core.OpenSaml20SpProcessorTest.java

@Test
public void testTryAuthenticationPropagation() throws Exception {

    final IIncomingSaml incomingSaml = Mockito.mock(IIncomingSaml.class);
    final QueryAuthnResponse queryAuthnResponse = Mockito.mock(QueryAuthnResponse.class);
    final List<IAuthentication> authns = new ArrayList<IAuthentication>();
    final BasicSamlAuthentication basicAuth = new BasicSamlAuthentication();
    basicAuth.addAttribute(AUTH_ATTR_KEY, AUTH_ATTR_VALUES);
    authns.add(basicAuth);

    Mockito.when(incomingSaml.getSamlQuery()).thenReturn(queryAuthnResponse);
    Mockito.when(queryAuthnResponse.getSamlAuthentications()).thenReturn(authns);

    final AtomicBoolean authPropagated = new AtomicBoolean(false);

    this.spProcessor.setAuthenticationHandler(new IAuthenticationHandler() {

        @Override
        public void propagateAuthentications(List<IAuthentication> authentications) {
            Assert.assertNotNull("No authentications propagated !", authentications);
            Assert.assertEquals("Bad authentications list size !", authns.size(), authentications.size());

            final IAuthentication authn = authentications.iterator().next();
            Assert.assertNotNull("Null authentication attributes list !", authn.getAttributes());
            Assert.assertEquals("Bad authentication attributes list size !", basicAuth.getAttributes().size(),
                    authn.getAttributes().size());

            final List<String> values = authn.getAttribute(AUTH_ATTR_KEY);
            Assert.assertNotNull("No attribute values found in propagated authentications !", values);
            Assert.assertEquals("Bad values list size !", AUTH_ATTR_VALUES.size(), values.size());

            final Iterator<String> valuesIt = values.iterator();
            Assert.assertEquals("Bad first propagated authentication attibutes !", AUTH_ATTR_VALUE_1,
                    valuesIt.next());
            Assert.assertEquals("Bad second propagated authentication attribute value !", AUTH_ATTR_VALUE_2,
                    valuesIt.next());

            authPropagated.set(true);
        }
    });

    this.spProcessor.tryAuthenticationPropagation(incomingSaml);

    Assert.assertTrue("Authentication wasn't propagated !", authPropagated.get());
}

From source file:org.apache.hadoop.hdfs.TestFileConcurrentReader.java

@Test
public void testImmediateReadOfNewFile() throws IOException {
    final int blockSize = 64 * 1024;
    final int writeSize = 10 * blockSize;
    Configuration conf = new Configuration();

    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
    init(conf);

    final int requiredSuccessfulOpens = 100;
    final Path file = new Path("/file1");
    final AtomicBoolean openerDone = new AtomicBoolean(false);
    final AtomicReference<String> errorMessage = new AtomicReference<>();
    final FSDataOutputStream out = fileSystem.create(file);

    final Thread writer = new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                while (!openerDone.get()) {
                    out.write(DFSTestUtil.generateSequentialBytes(0, writeSize));
                    out.hflush();
                }
            } catch (IOException e) {
                LOG.warn("error in writer", e);
            } finally {
                try {
                    out.close();
                } catch (IOException e) {
                    LOG.error("unable to close file");
                }
            }
        }
    });

    Thread opener = new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                for (int i = 0; i < requiredSuccessfulOpens; i++) {
                    fileSystem.open(file).close();
                }
                openerDone.set(true);
            } catch (IOException e) {
                openerDone.set(true);
                errorMessage.set(String.format("got exception : %s", StringUtils.stringifyException(e)));
            } catch (Exception e) {
                openerDone.set(true);
                errorMessage.set(String.format("got exception : %s", StringUtils.stringifyException(e)));
                writer.interrupt();
                fail("here");
            }
        }
    });

    writer.start();
    opener.start();

    try {
        writer.join();
        opener.join();
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }

    assertNull(errorMessage.get(), errorMessage.get());
}

From source file:com.spectralogic.ds3client.integration.Smoke_Test.java

@Test
public void testHelperMetadata() throws IOException, URISyntaxException, XmlProcessingException {
    final String bucketName = "helper_metadata";
    try {
        HELPERS.ensureBucketExists(bucketName, envDataPolicyId);

        final List<Ds3Object> objects = new ArrayList<>();
        for (final String book : BOOKS) {
            final Path objPath = ResourceUtils.loadFileResource(RESOURCE_BASE_NAME + book);
            final long bookSize = Files.size(objPath);
            final Ds3Object obj = new Ds3Object(book, bookSize);

            objects.add(obj);
        }

        final Ds3ClientHelpers.Job job = HELPERS.startWriteJob(bucketName, objects);

        final AtomicBoolean calledWithMetadata = new AtomicBoolean(false);

        job.withMetadata(new Ds3ClientHelpers.MetadataAccess() {
            @Override
            public Map<String, String> getMetadataValue(final String filename) {
                if (filename.equals("beowulf.txt")) {
                    calledWithMetadata.set(true);
                    return ImmutableMap.of("fileType", "text");
                }

                return null;
            }
        });

        job.transfer(new ResourceObjectPutter(RESOURCE_BASE_NAME));

        assertTrue(calledWithMetadata.get());

        final HeadObjectResponse response = client.headObject(new HeadObjectRequest(bucketName, "beowulf.txt"));
        final Metadata metadata = response.getMetadata();
        final List<String> values = metadata.get("fileType");
        assertThat(values.size(), is(1));
        assertThat(values.get(0), is("text"));

    } finally {
        deleteAllContents(client, bucketName);
    }
}