Example usage for java.util.concurrent.atomic.AtomicBoolean, constructor AtomicBoolean()

Introduction

On this page you can find usage examples for the java.util.concurrent.atomic.AtomicBoolean no-argument constructor, AtomicBoolean().

Prototype

public AtomicBoolean() 

Document

Creates a new AtomicBoolean with initial value false.
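
The snippet below is a minimal sketch, not drawn from the sources listed under Usage, showing that the no-argument constructor starts the flag at false and how compareAndSet is commonly used to run one-time work exactly once (the class name is made up for illustration):

import java.util.concurrent.atomic.AtomicBoolean;

public class AtomicBooleanDefaultExample {
    // The no-argument constructor initializes the flag to false.
    private static final AtomicBoolean initialized = new AtomicBoolean();

    public static void main(String[] args) {
        System.out.println(initialized.get()); // prints "false"

        // compareAndSet(false, true) succeeds only for the first caller,
        // so the guarded block runs at most once even under concurrency.
        if (initialized.compareAndSet(false, true)) {
            System.out.println("running one-time initialization");
        }
    }
}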

Usage

From source file:com.adjust.sdk.PackageHandler.java

private void initInternal() {
    requestHandler = AdjustFactory.getRequestHandler(this);

    isSending = new AtomicBoolean();

    readPackageQueue();
}

From source file:ch.cyberduck.core.b2.B2LargeUploadServiceTest.java

@Test
public void testAppendNoPartCompleted() throws Exception {
    final B2Session session = new B2Session(new Host(new B2Protocol(), new B2Protocol().getDefaultHostname(),
            new Credentials(System.getProperties().getProperty("b2.user"),
                    System.getProperties().getProperty("b2.key"))));
    session.open(new DisabledHostKeyCallback());
    session.login(new DisabledPasswordStore(), new DisabledLoginCallback(), new DisabledCancelCallback());
    final Path bucket = new Path("test-cyberduck", EnumSet.of(Path.Type.directory, Path.Type.volume));
    final Path test = new Path(bucket, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file));
    final Local local = new Local(System.getProperty("java.io.tmpdir"), UUID.randomUUID().toString());
    final int length = 102 * 1024 * 1024;
    final byte[] content = RandomUtils.nextBytes(length);
    IOUtils.write(content, local.getOutputStream(false));
    final TransferStatus status = new TransferStatus();
    status.setLength(content.length);
    final AtomicBoolean interrupt = new AtomicBoolean();
    final B2LargeUploadService service = new B2LargeUploadService(session, new B2WriteFeature(session),
            100 * 1024L * 1024L, 1);
    try {
        service.upload(test, local, new BandwidthThrottle(BandwidthThrottle.UNLIMITED),
                new DisabledStreamListener() {
                    long count;

                    @Override
                    public void sent(final long bytes) {
                        count += bytes;
                        if (count >= 5 * 1024L * 1024L) {
                            throw new RuntimeException();
                        }
                    }
                }, status, new DisabledLoginCallback());
    } catch (BackgroundException e) {
        // Expected
        interrupt.set(true);
    }
    assertTrue(interrupt.get());
    assertEquals(0L, status.getOffset(), 0L);
    assertFalse(status.isComplete());

    final TransferStatus append = new TransferStatus().append(true).length(content.length);
    service.upload(test, local, new BandwidthThrottle(BandwidthThrottle.UNLIMITED),
            new DisabledStreamListener(), append, new DisabledLoginCallback());
    assertTrue(new B2FindFeature(session).find(test));
    assertEquals(content.length, new B2AttributesFinderFeature(session).find(test).getSize());
    assertEquals(content.length, append.getOffset(), 0L);
    assertTrue(append.isComplete());
    final byte[] buffer = new byte[content.length];
    final InputStream in = new B2ReadFeature(session).read(test, new TransferStatus(),
            new DisabledConnectionCallback());
    IOUtils.readFully(in, buffer);
    in.close();
    assertArrayEquals(content, buffer);
    new B2DeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(),
            new Delete.DisabledCallback());
    local.delete();
    session.close();
}

From source file:com.vmware.upgrade.progress.impl.SimpleAggregatingProgressReporter.java

/**
 * Constructs a reporter which will use the specified {@code ExecutionStateAggregator}
 * to aggregate child task states. Initially the reporter has no children.
 * <p>
 * The client must call the {@link #setChildren} method to set the child tasks.
 *
 * @param stateAggregator the aggregator used to combine child task states; must not be {@code null}
 */
public SimpleAggregatingProgressReporter(final ExecutionStateAggregator stateAggregator) {
    super();

    if (stateAggregator == null) {
        throw new IllegalArgumentException("stateAggregator");
    }

    this.childListeners = new ArrayList<PropagatingListener>();
    this.terminated = new AtomicBoolean();
    this.stateAggregator = stateAggregator;
}

From source file:com.asakusafw.testdriver.inprocess.InProcessJobExecutorTest.java

/**
 * Test method for executing Hadoop job.
 */
@Test
public void executeJob_simple() {
    prepareJobflow();
    AtomicBoolean call = new AtomicBoolean();
    MockHadoopJob.callback((args, conf) -> {
        call.set(true);
        return 0;
    });

    JobExecutor executor = new InProcessJobExecutor(context);
    try {
        executor.execute(job(MockHadoopJob.class.getName()), Collections.emptyMap());
    } catch (IOException e) {
        throw new AssertionError(e);
    }
    assertThat(call.get(), is(true));
}

From source file:de.dal33t.powerfolder.test.transfer.BandwidthLimitTest.java

public void testBandwidthStats() {
    BandwidthLimiter bl = BandwidthLimiter.LAN_INPUT_BANDWIDTH_LIMITER;
    bl.setAvailable(0);
    provider.start();
    provider.setLimitBPS(bl, 1000);
    final AtomicBoolean gotStat = new AtomicBoolean();
    BandwidthStatsListener listener = new BandwidthStatsListener() {
        public void handleBandwidthStat(BandwidthStat stat) {
            System.out.println("Got a stat...");
            gotStat.set(true);
        }

        public boolean fireInEventDispatchThread() {
            return false;
        }
    };
    provider.addBandwidthStatListener(listener);
    try {
        Thread.sleep(2000);
    } catch (InterruptedException e) {
        fail(e.toString());
    }
    provider.removeLimiter(bl);
    provider.shutdown();
    assertTrue("Failed to get any stats?", gotStat.get());
}

From source file:fr.inria.oak.paxquery.pact.io.XmlConsTreePatternOutputFormat.java

private StringBuilder[][] writeRecord(RecordList listRecords, NestedMetadata signature,
        ConstructionTreePattern[] ctps, AtomicBoolean[] nullResults) throws IOException {
    StringBuilder[][] result = new StringBuilder[listRecords.size()][ctps.length];
    for (int i = 0; i < listRecords.size(); i++) {
        for (int j = 0; j < ctps.length; j++) {
            result[i][j] = new StringBuilder();
        }
    }

    for (int i = 0; i < listRecords.size(); i++) { //For each record
        Record record = listRecords.get(i);

        //printRecord(record);

        for (int j = 0; j < ctps.length; j++) { //For each CTP
            ConstructionTreePattern ctp = ctps[j];
            ConstructionTreePatternNode ctpNode = ctp.getRoot();
            List<ConstructionTreePatternEdge> childrenEdges = ctp.getChildrenEdges().get(ctpNode);

            StringBuilder[][] resultChildren = null;
            AtomicBoolean[] nullResultChildren = null;
            if (childrenEdges != null && childrenEdges.size() != 0) {
                //Create list CTPs from child nodes
                ConstructionTreePattern[] newCtps = new ConstructionTreePattern[childrenEdges.size()];
                for (int k = 0; k < newCtps.length; k++) {
                    newCtps[k] = ConstructionTreePattern.deepCopySubtree(childrenEdges.get(k).getChild());
                }
                //Holder for booleans for null results
                nullResultChildren = new AtomicBoolean[childrenEdges.size()];
                for (int k = 0; k < nullResultChildren.length; k++) {
                    nullResultChildren[k] = new AtomicBoolean();
                }
                //Create list records
                RecordList newListRecords;
                NestedMetadata newSignature;
                if (ctpNode.getContentType() == ContentType.VARIABLE_PATH) {
                    newListRecords = record.getField(ctpNode.getVarPath().get(0), RecordList.class);
                    newSignature = signature.getNestedChild(ctpNode.getVarPath().get(0));
                } else {
                    newListRecords = new RecordList();
                    newListRecords.add(record);
                    newSignature = signature;
                }
                //Obtain result children
                resultChildren = writeRecord(newListRecords, newSignature, newCtps, nullResultChildren);
            }

            //Construct the subtree starting at this node
            StringBuilder ctpNodeResult = new StringBuilder();
            boolean allNull = allNullUnderNode(nullResultChildren);
            if (!ctpNode.isOptional() || !allNull) {
                //
                if (ctpNode.getContentType() == ContentType.ELEMENT) {
                    ctpNodeResult.append("<" + ctpNode.getValue());
                    int k;
                    for (k = 0; childrenEdges != null && k < childrenEdges.size()
                            && childrenEdges.get(k).getChild().getContentType() == ContentType.ATTRIBUTE; k++) {
                        ctpNodeResult.append(" " + resultChildren[0][k].toString());
                    }
                    if (childrenEdges == null || k == childrenEdges.size()) {
                        ctpNodeResult.append("/>");
                    } else {
                        ctpNodeResult.append(">");
                        for (; k < childrenEdges.size(); k++) {
                            ctpNodeResult.append(resultChildren[0][k].toString());
                        }
                        ctpNodeResult.append("</" + ctpNode.getValue() + ">");
                    }
                } else if (ctpNode.getContentType() == ContentType.ATTRIBUTE) {
                    ctpNodeResult.append(ctpNode.getValue() + "=\"" + resultChildren[0][0].toString() + "\"");
                } else if (ctpNode.getContentType() == ContentType.ELEMENT_VALUE) {
                    ctpNodeResult.append(ctpNode.getValue());
                } else if (ctpNode.getContentType() == ContentType.ATTRIBUTE_VALUE) {
                    ctpNodeResult.append(ctpNode.getValue());
                } else if (ctpNode.getContentType() == ContentType.VARIABLE_PATH
                        && (childrenEdges == null || childrenEdges.size() == 0)) {
                    allNull = true;
                    //Create content from the record
                    List<Integer> varPath = ctpNode.getVarPath();
                    if (varPath.size() == 1) {
                        StringValue v = record.getField(varPath.get(0), StringValue.class);
                        if (!v.getValue().equals("\0")) {
                            ctpNodeResult.append(v);
                            allNull = false;
                        }
                    } else {
                        RecordList list = record.getField(varPath.get(0), RecordList.class);
                        for (int k = 1; k < varPath.size() - 1; k++) {
                            RecordList newList = new RecordList();
                            for (Record nestedRecord : list) {
                                newList.addAll(nestedRecord.getField(varPath.get(k), RecordList.class));
                            }
                            list = newList;
                        }
                        for (Record nestedRecord : list) {
                            StringValue v = nestedRecord.getField(varPath.get(varPath.size() - 1),
                                    StringValue.class);
                            if (!v.getValue().equals("\0")) {
                                ctpNodeResult.append(v);
                                allNull = false;
                            }
                        }
                    }
                } else { //childrenEdges != null
                    //Copy content from children
                    for (int x = 0; x < resultChildren.length; x++) {
                        for (int k = 0; k < resultChildren[x].length; k++) {
                            ctpNodeResult.append(resultChildren[x][k].toString());
                        }
                    }
                }
            }
            result[i][j].append(ctpNodeResult);
            nullResults[j].set(allNull);
        }
    }

    return result;
}

From source file:ddf.catalog.resource.download.ReliableResourceDownloaderTest.java

@Test
@Ignore
// Can't figure out how to throw IOException from CountingOutputStream
public void testClientOutputStreamException() throws Exception {

    downloaderConfig.setCacheEnabled(true);

    ResourceCache mockCache = mock(ResourceCache.class);
    when(mockCache.isPending(anyString())).thenReturn(false);
    when(mockCache.getProductCacheDirectory()).thenReturn(productCacheDirectory);
    downloaderConfig.setResourceCache(mockCache);

    mis = new MockInputStream(productInputFilename);
    ResourceResponse mockResponse = getMockResourceResponse(mis);

    ReliableResourceDownloader downloader = new ReliableResourceDownloader(downloaderConfig,
            new AtomicBoolean(), "123", mockResponse, getMockRetriever());
    downloader.setupDownload(mockMetacard, new DownloadStatusInfoImpl());

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    CountingOutputStream mockCountingFbos = new CountingOutputStream(baos);
    IOUtils.closeQuietly(baos);

    downloader.setCountingOutputStream(mockCountingFbos);

    downloader.run();

    verify(mockPublisher, times(1)).postRetrievalStatus(any(ResourceResponse.class),
            eq(ProductRetrievalStatus.CANCELLED), any(Metacard.class), anyString(), anyLong(), eq(DOWNLOAD_ID));
    verify(mockCache, times(1)).removePendingCacheEntry(anyString());
    assertThat(downloaderConfig.isCacheEnabled(), is(false));

}

From source file:com.microsoft.alm.plugin.idea.ui.simplecheckout.SimpleCheckoutModel.java

public void cloneRepo() {
    final ModelValidationInfo validationInfo = validate();
    if (validationInfo == null) {
        final Task.Backgroundable createCloneTask = new Task.Backgroundable(project,
                TfPluginBundle.message(TfPluginBundle.KEY_CHECKOUT_DIALOG_TITLE), true,
                PerformInBackgroundOption.DEAF) {
            final AtomicBoolean cloneResult = new AtomicBoolean();

            @Override
            public void run(@NotNull final ProgressIndicator progressIndicator) {
                progressIndicator.setText(TfPluginBundle.message(TfPluginBundle.KEY_CHECKOUT_DIALOG_TITLE));
                // get context from manager, and store in active context
                final ServerContext context = ServerContextManager.getInstance().getAuthenticatedContext(gitUrl,
                        true);

                if (context == null) {
                    VcsNotifier.getInstance(project).notifyError(
                            TfPluginBundle
                                    .message(TfPluginBundle.KEY_CHECKOUT_ERRORS_AUTHENTICATION_FAILED_TITLE),
                            TfPluginBundle.message(TfPluginBundle.KEY_ERRORS_AUTH_NOT_SUCCESSFUL, gitUrl));
                    return;
                }

                final String gitRepositoryStr = context.getUsableGitUrl();
                final Git git = ServiceManager.getService(Git.class);
                cloneResult.set(git4idea.checkout.GitCheckoutProvider.doClone(project, git, getDirectoryName(),
                        getParentDirectory(), gitRepositoryStr));

                // Add Telemetry for the clone call along with its success/failure
                TfsTelemetryHelper.getInstance().sendEvent(COMMANDLINE_CLONE_ACTION,
                        new TfsTelemetryHelper.PropertyMapBuilder().currentOrActiveContext(context)
                                .actionName(COMMANDLINE_CLONE_ACTION).success(cloneResult.get()).build());
            }

            @Override
            public void onSuccess() {
                // if clone was successful then complete the checkout process which gives the option to open the project
                if (cloneResult.get()) {
                    DvcsUtil.addMappingIfSubRoot(project,
                            FileUtil.join(new String[] { parentDirectory, directoryName }), "Git");
                    listener.directoryCheckedOut(new File(parentDirectory, directoryName), GitVcs.getKey());
                    listener.checkoutCompleted();
                }
            }
        };
        createCloneTask.queue();
    }
}

From source file:com.asakusafw.testdriver.inprocess.InProcessJobExecutorTest.java

/**
 * Test method for executing Hadoop job w/ properties.
 */
@Test
public void executeJob_w_properties() {
    prepareJobflow();
    AtomicBoolean call = new AtomicBoolean();
    MockHadoopJob.callback((args, conf) -> {
        call.set(true);
        assertThat(conf.get("com.example.testing"), is("true"));
        return 0;
    });

    TestExecutionPlan.Job job = job(MockHadoopJob.class.getName(), "com.example.testing", "true");

    JobExecutor executor = new InProcessJobExecutor(context);
    try {
        executor.execute(job, Collections.emptyMap());
    } catch (IOException e) {
        throw new AssertionError(e);
    }
    assertThat(call.get(), is(true));
}

From source file:io.pravega.segmentstore.server.reading.StorageReaderTests.java

/**
 * Tests the ability to queue dependent reads (subsequent reads that only want to read a part of a previous read).
 * Tests this with both successful and failed reads.
 */
@Test
public void testDependents() {
    final Duration waitTimeout = Duration.ofSeconds(5);
    TestStorage storage = new TestStorage();
    CompletableFuture<Integer> signal = new CompletableFuture<>();
    AtomicBoolean wasReadInvoked = new AtomicBoolean();
    storage.readImplementation = () -> {
        if (wasReadInvoked.getAndSet(true)) {
            Assert.fail(
                    "Read was invoked multiple times, which is a likely indicator that the requests were not chained.");
        }
        return signal;
    };

    @Cleanup
    StorageReader reader = new StorageReader(SEGMENT_METADATA, storage, executorService());

    // Create some reads.
    CompletableFuture<StorageReader.Result> c1 = new CompletableFuture<>();
    CompletableFuture<StorageReader.Result> c2 = new CompletableFuture<>();
    reader.execute(new StorageReader.Request(0, 100, c1::complete, c1::completeExceptionally, TIMEOUT));
    reader.execute(new StorageReader.Request(50, 100, c2::complete, c2::completeExceptionally, TIMEOUT));

    Assert.assertFalse("One or more of the reads has completed prematurely.", c1.isDone() || c2.isDone());

    signal.completeExceptionally(new IntentionalException());
    AssertExtensions.assertThrows("The first read was not failed with the correct exception.",
            () -> c1.get(waitTimeout.toMillis(), TimeUnit.MILLISECONDS),
            ex -> ex instanceof IntentionalException);

    AssertExtensions.assertThrows("The second read was not failed with the correct exception.",
            () -> c2.get(waitTimeout.toMillis(), TimeUnit.MILLISECONDS),
            ex -> ex instanceof IntentionalException);
}