Example usage for java.util.concurrent LinkedBlockingQueue add

List of usage examples for java.util.concurrent LinkedBlockingQueue add

Introduction

On this page you can find example usage for java.util.concurrent LinkedBlockingQueue add.

Prototype

boolean add(E e);

Document

Inserts the specified element into this queue if it is possible to do so immediately without violating capacity restrictions, returning true upon success and throwing an IllegalStateException if no space is currently available.
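
Because add never blocks, it fits an unbounded queue or situations where a full queue should be treated as an error rather than waited on (put blocks, offer returns false). A minimal, self-contained sketch (not taken from any of the projects below) showing both outcomes:

import java.util.concurrent.LinkedBlockingQueue;

public class AddExample {
    public static void main(String[] args) {
        // Unbounded queue: add always succeeds (capacity defaults to Integer.MAX_VALUE).
        LinkedBlockingQueue<String> unbounded = new LinkedBlockingQueue<String>();
        unbounded.add("first");

        // Bounded queue: add throws IllegalStateException once capacity is reached.
        LinkedBlockingQueue<String> bounded = new LinkedBlockingQueue<String>(1);
        bounded.add("fits");
        try {
            bounded.add("does not fit");
        } catch (IllegalStateException full) {
            // offer(e) would have returned false here instead of throwing.
            System.out.println("Queue full: " + full.getMessage());
        }
    }
}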

Usage

From source file:org.zoneproject.extractor.plugin.spotlight.App.java

public static void main(String[] args) {
    LinkedList<Item> itemsPending = new LinkedList<Item>();
    Prop[] fr = { new Prop(ZoneOntology.PLUGIN_LANG, "\"fr\"") };
    Prop[] en = { new Prop(ZoneOntology.PLUGIN_LANG, "\"en\"") };

    LinkedBlockingQueue<AnnotationThread> annotationThreads;
    HashMap<String, ArrayList<Prop>> propsToSave;
    propsPendingSave = new HashMap<String, ArrayList<Prop>>();
    while (true) {
        annotationThreads = new LinkedBlockingQueue<AnnotationThread>();

        while (true) {//while we can download items

            Item[] enItems = Database.getItemsNotAnotatedForPluginsWithDeps(PLUGIN_URI, en, SIM_DOWNLOADS / 2);
            Item[] frItems = Database.getItemsNotAnotatedForPluginsWithDeps(PLUGIN_URI, fr, SIM_DOWNLOADS / 2);
            Item[] items = (Item[]) ArrayUtils.addAll(enItems, frItems);

            if (items != null && items.length > 0) {
                //check if the item is in annotation process
                for (Item i : items) {
                    boolean exist = false;
                    for (AnnotationThread a : annotationThreads) {
                        if (a.item.getUri().equals(i.getUri())) {
                            exist = true;
                        }
                    }
                    if (!exist) {
                        itemsPending.add(i);
                    }
                }
            }
            if (itemsPending.isEmpty()) {
                break;
            }

            while (!itemsPending.isEmpty()) {

                //we add new threads until the concurrency limit (SIM_ANNOTATE) is reached
                while (annotationThreads.size() < SIM_ANNOTATE && !itemsPending.isEmpty()) {
                    AnnotationThread newAnnot = new AnnotationThread(itemsPending.removeFirst());
                    newAnnot.start();
                    annotationThreads.add(newAnnot);
                }

                //try{
                //we try to end some terminated threads
                //synchronized(annotationThreads){
                for (AnnotationThread a : annotationThreads) {
                    if (!a.isAlive()) {
                        annotationThreads.remove(a);
                    } else if (a.getDuration() > LIMIT_TIME_FOR_DOWN) {
                        a.interrupt();
                    } else if (a.getDuration() > 10) {
                        logger.info("is alive[" + a.getDuration() + "]: " + a.item.getUri());
                    }
                    //try{Thread.currentThread().sleep(1000);}catch(Exception ie){}//TODO remove
                }
                //}
                //}catch(java.util.ConcurrentModificationException concurrentAccess){
                //    logger.warn("concurrent access!");
                //}

                if (annotationThreads.size() >= SIM_ANNOTATE) {
                    try {
                        Thread.sleep(1000);
                    } catch (Exception ie) {
                    }
                }
            }

            logger.info("start saving");
            synchronized (propsPendingSave) {
                propsToSave = (HashMap<String, ArrayList<Prop>>) propsPendingSave.clone();
                propsPendingSave.clear();
            }
            Database.addAnnotations(propsToSave);
            logger.info("end saving");

        }

        logger.info("no more items to annotate");
        try {
            Thread.sleep(1000);
        } catch (Exception ie) {
        }
    }
}

From source file:com.offbynull.portmapper.common.UdpCommunicator.java

/**
 * Add a packet to the send queue of this UDP communicator.
 * @param channel channel to send on
 * @param dst destination to send to
 * @param data packet to send
 * @throws NullPointerException if any argument is {@code null}, or if {@code channel} doesn't belong to this communicator
 * @throws IllegalStateException if this communicator isn't running
 */
public void send(DatagramChannel channel, InetSocketAddress dst, ByteBuffer data) {
    Validate.notNull(channel);
    Validate.notNull(dst);
    Validate.notNull(data);
    Validate.validState(isRunning());

    LinkedBlockingQueue<ImmutablePair<InetSocketAddress, ByteBuffer>> queue = sendQueue.get(channel);

    Validate.isTrue(queue != null); // the channel must belong to this communicator

    queue.add(new ImmutablePair<>(dst, ByteBufferUtils.copyContents(data)));

    selector.wakeup();
}
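
The communicator above keeps one LinkedBlockingQueue per channel and wakes its selector thread after queueing a packet. A simplified sketch of that idea built only from JDK types (the class, field, and method names here are illustrative, not the actual UdpCommunicator internals):

import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.DatagramChannel;
import java.nio.channels.Selector;
import java.util.AbstractMap.SimpleImmutableEntry;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;

public class SendQueueSketch {
    // one pending-packet queue per channel owned by this communicator
    private final Map<DatagramChannel, LinkedBlockingQueue<Entry<InetSocketAddress, ByteBuffer>>> sendQueue =
            new ConcurrentHashMap<>();
    private final Selector selector;

    public SendQueueSketch(Selector selector) {
        this.selector = selector;
    }

    public void register(DatagramChannel channel) {
        sendQueue.put(channel, new LinkedBlockingQueue<Entry<InetSocketAddress, ByteBuffer>>());
    }

    public void send(DatagramChannel channel, InetSocketAddress dst, ByteBuffer data) {
        LinkedBlockingQueue<Entry<InetSocketAddress, ByteBuffer>> queue = sendQueue.get(channel);
        if (queue == null) {
            throw new NullPointerException("channel does not belong to this communicator");
        }
        // add never blocks here because the per-channel queue is unbounded
        queue.add(new SimpleImmutableEntry<InetSocketAddress, ByteBuffer>(dst, data.duplicate()));
        selector.wakeup(); // nudge the selector thread so it drains the queue
    }
}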

From source file:org.apache.ftpserver.ftpletcontainer.FtpLetReturnDefaultTest.java

public void testLogin() throws Exception {
    final LinkedBlockingQueue<User> loggedInUser = new LinkedBlockingQueue<User>();

    MockFtplet.callback = new MockFtpletCallback() {
        public FtpletResult onLogin(FtpSession session, FtpRequest request) throws FtpException, IOException {
            loggedInUser.add(session.getUser());

            return super.onLogin(session, request);
        }

    };
    MockFtpletCallback.returnValue = FtpletResult.DEFAULT;

    assertTrue(client.login(ADMIN_USERNAME, ADMIN_PASSWORD));

    assertNotNull(loggedInUser.poll(2000, TimeUnit.MILLISECONDS));
}
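
The pattern in this test, where a callback thread hands its result to the test thread with add and the test thread waits with a timed poll, recurs in several of the examples below. A stripped-down sketch of just that handoff (the thread and value names are illustrative, not from any of these projects):

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

public class CallbackCaptureSketch {
    public static void main(String[] args) throws InterruptedException {
        final LinkedBlockingQueue<String> results = new LinkedBlockingQueue<String>();

        // Some framework callback runs on another thread and pushes its result into the queue.
        Thread callbackThread = new Thread(new Runnable() {
            public void run() {
                results.add("login-event");
            }
        });
        callbackThread.start();

        // The test thread blocks for at most two seconds waiting for the callback to fire.
        String result = results.poll(2000, TimeUnit.MILLISECONDS);
        if (result == null) {
            throw new AssertionError("callback never fired");
        }
        System.out.println("Received: " + result);
    }
}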

From source file:se.vgregion.pubsub.push.impl.PushTest.java

@Test
@Transactional // TODO remove
public void test() throws InterruptedException {
    final URI testUri = URI.create("http://feeds.feedburner.com/protocol7/main");

    PubSubEngine pubSubEngine = applicationContext.getBean(PubSubEngine.class);

    final LinkedBlockingQueue<Feed> publishedFeeds = new LinkedBlockingQueue<Feed>();

    pubSubEngine.subscribe(new Subscriber() {

        @Override
        public void timedOut() {
        }

        @Override
        public void publish(Feed feed, PushJms pushJms) throws PublicationFailedException {
            publishedFeeds.add(feed);
        }

        @Override
        public URI getTopic() {
            return testUri;
        }

        @Override
        public DateTime getTimeout() {
            return null;
        }

        @Override
        public DateTime getLastUpdated() {
            return null;
        }
    });

    //        pubSubEngine.getOrCreateTopic(testUri).addSubscriber(new DefaultPushSubscriber(
    //                applicationContext.getBean(PushSubscriberRepository.class),
    //                testUri, URI.create("http://localhost:9000"), 100, "verify"));

    PushSubscriberManager pushSubscriberManager = applicationContext.getBean(PushSubscriberManager.class);
    pushSubscriberManager.retrive(testUri);

    Feed feed = publishedFeeds.poll(10000, TimeUnit.MILLISECONDS);

    //        Thread.sleep(200000);
}

From source file:se.vgregion.pubsub.push.impl.DefaultPushSubscriberVerifyTest.java

@Test
@Transactional
@Rollback
public void verify() throws Exception {
    final LinkedBlockingQueue<HttpRequest> requests = new LinkedBlockingQueue<HttpRequest>();

    server.register("/*", new HttpRequestHandler() {
        @Override
        public void handle(HttpRequest request, HttpResponse response, HttpContext context)
                throws HttpException, IOException {
            requests.add(request);

            response.setEntity(
                    new StringEntity(getQueryParamValue(request.getRequestLine().getUri(), "hub.challenge")));
        }
    });

    subscriber.verify(SubscriptionMode.SUBSCRIBE);

    Assert.assertEquals(1, requests.size());

    HttpRequest actualRequest = requests.poll();
    String requestUri = actualRequest.getRequestLine().getUri();
    Assert.assertEquals("subscribe", getQueryParamValue(requestUri, "hub.mode"));
    Assert.assertEquals(subscriber.getTopic().toString(),
            URLDecoder.decode(getQueryParamValue(requestUri, "hub.topic"), "UTF-8"));
    Assert.assertNotNull(getQueryParamValue(requestUri, "hub.challenge"));
    Assert.assertEquals("123", getQueryParamValue(requestUri, "hub.lease_seconds"));
    Assert.assertEquals(subscriber.getVerifyToken(), getQueryParamValue(requestUri, "hub.verify_token"));
}

From source file:org.rifidi.edge.adapter.alien.Alien9800ReaderSession.java

/**
 * helper method that sends a 'get externalInput' command to the reader. It
 * blocks until the response returns.
 * 
 * @return the External Input
 * @throws CannotExecuteException
 */
public int getExternalInput() throws CannotExecuteException {
    LinkedBlockingQueue<AlienCommandObjectWrapper> commandObj = new LinkedBlockingQueue<AlienCommandObjectWrapper>();
    commandObj.add(new AlienCommandObjectWrapper(Alien9800Reader.PROP_EXTERNAL_INPUT,
            new AlienGetCommandObject(COMMAND_EXTERNAL_INPUT)));
    boolean executed = ((Alien9800Reader) this.getSensor()).applyPropertyChanges(commandObj, true);
    if (executed) {
        return ((Alien9800Reader) this.getSensor()).getExternalInput();

    } else {
        throw new CannotExecuteException("The GPI command may not have executed");
    }

}

From source file:se.vgregion.pubsub.push.impl.DefaultPushSubscriberPublishTest.java

@Test
public void publish() throws Exception {

    subscriber = new DefaultPushSubscriber(UnitTestConstants.TOPIC, buildTestUrl("/"), UnitTestConstants.FUTURE,
            UnitTestConstants.UPDATED1, 100, "verify", UnitTestConstants.SECRET, true);

    final LinkedBlockingQueue<HttpRequest> issuedRequests = new LinkedBlockingQueue<HttpRequest>();
    final LinkedBlockingQueue<byte[]> issuedRequestBodies = new LinkedBlockingQueue<byte[]>();
    server.register("/*", new HttpRequestHandler() {
        @Override
        public void handle(HttpRequest request, HttpResponse response, HttpContext context)
                throws HttpException, IOException {
            issuedRequests.add(request);

            HttpEntity entity = ((HttpEntityEnclosingRequest) request).getEntity();
            ByteArrayOutputStream buffer = new ByteArrayOutputStream();
            entity.writeTo(buffer);
            issuedRequestBodies.add(buffer.toByteArray());
        }
    });

    Feed feed = new FeedBuilder(ContentType.ATOM).id("e1")
            .entry(new EntryBuilder().id("f1").updated(new DateTime()).build())
            .entry(new EntryBuilder().id("f2").updated(UnitTestConstants.UPDATED1.minusHours(1)).build())
            .build();

    subscriber.publish(feed, null);

    // subscriber should be updated
    Assert.assertEquals(new DateTime(), subscriber.getLastUpdated());

    HttpRequest request = issuedRequests.poll(10000, TimeUnit.MILLISECONDS);
    Assert.assertNotNull(request);
    Assert.assertEquals(ContentType.ATOM.toString(), request.getFirstHeader("Content-Type").getValue());

    // verify HMAC header
    Assert.assertEquals("sha1=1356b52665408a17af46803a7988e48d40d1fb75",
            request.getFirstHeader("X-Hub-Signature").getValue());

    // verify content
    Assert.assertTrue(request instanceof HttpEntityEnclosingRequest);

    HttpEntity entity = ((HttpEntityEnclosingRequest) request).getEntity();

    Assert.assertNotNull(entity);

    Document actualAtom = new Builder().build(new ByteArrayInputStream(issuedRequestBodies.poll()));

    Assert.assertEquals(1, actualAtom.getRootElement().getChildElements("entry", Namespaces.ATOM).size());

}

From source file:org.ala.spatial.analysis.index.LayerDistanceIndex.java

/**
 * @param threadcount    number of threads to run analysis.
 * @param onlyThesePairs array of distances to run as fieldId1 + " " +
 *                       fieldId2 where fieldId1.compareTo(fieldId2) &lt; 0, or null for all missing
 *                       distances.
 * @throws InterruptedException
 */
public void occurrencesUpdate(int threadcount, String[] onlyThesePairs) throws InterruptedException {

    //create distances file if it does not exist.
    File layerDistancesFile = new File(AlaspatialProperties.getAnalysisWorkingDir() + LAYER_DISTANCE_FILE);
    if (!layerDistancesFile.exists()) {
        try {
            FileWriter fw = new FileWriter(layerDistancesFile);
            fw.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    Map<String, Double> map = loadDistances();

    LinkedBlockingQueue<String> todo = new LinkedBlockingQueue<String>();

    if (onlyThesePairs != null && onlyThesePairs.length > 0) {
        for (String s : onlyThesePairs) {
            todo.add(s);
        }
    } else {
        //find all environmental layer analysis files
        File root = new File(AlaspatialProperties.getAnalysisLayersDir());
        File[] dirs = root.listFiles(new FileFilter() {

            @Override
            public boolean accept(File pathname) {
                return pathname != null && pathname.isDirectory();
            }
        });

        HashMap<String, String> domains = new HashMap<String, String>();
        for (File dir : dirs) {
            //iterate through files so we get everything
            File[] files = new File(dir.getPath()).listFiles(new FileFilter() {

                @Override
                public boolean accept(File pathname) {
                    return pathname.getName().endsWith(".grd") && pathname.getName().startsWith("el");
                }
            });

            for (int i = 0; i < files.length; i++) {
                for (int j = i + 1; j < files.length; j++) {
                    String file1 = files[i].getName().replace(".grd", "");
                    String file2 = files[j].getName().replace(".grd", "");

                    //only operate on file names that are valid fields
                    if (Client.getFieldDao().getFieldById(file1) != null
                            && Client.getFieldDao().getFieldById(file2) != null) {

                        String domain1 = domains.get(file1);
                        if (domain1 == null) {
                            String pid1 = Client.getFieldDao().getFieldById(file1).getSpid();
                            domain1 = Client.getLayerDao().getLayerById(Integer.parseInt(pid1)).getdomain();
                            domains.put(file1, domain1);
                        }
                        String domain2 = domains.get(file2);
                        if (domain2 == null) {
                            String pid2 = Client.getFieldDao().getFieldById(file2).getSpid();
                            domain2 = Client.getLayerDao().getLayerById(Integer.parseInt(pid2)).getdomain();
                            domains.put(file2, domain2);
                        }

                        String key = (file1.compareTo(file2) < 0) ? file1 + " " + file2 : file2 + " " + file1;

                        //domain test
                        if (isSameDomain(parseDomain(domain1), parseDomain(domain2))) {
                            if (!map.containsKey(key) && !todo.contains(key)) {
                                todo.put(key);
                            }
                        }
                    }
                }
            }
        }
    }

    LinkedBlockingQueue<String> toDisk = new LinkedBlockingQueue<String>();
    CountDownLatch cdl = new CountDownLatch(todo.size());
    CalcThread[] threads = new CalcThread[threadcount];
    for (int i = 0; i < threadcount; i++) {
        threads[i] = new CalcThread(cdl, todo, toDisk);
        threads[i].start();
    }

    ToDiskThread toDiskThread = new ToDiskThread(
            AlaspatialProperties.getAnalysisWorkingDir() + LAYER_DISTANCE_FILE, toDisk);
    toDiskThread.start();

    cdl.await();

    for (int i = 0; i < threadcount; i++) {
        threads[i].interrupt();
    }

    toDiskThread.interrupt();
}
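
The method above fills a todo queue with add on the calling thread, hands the queue to a pool of CalcThread workers, and waits on a CountDownLatch sized to the number of queued items. A stripped-down sketch of that handoff (the worker logic and names are placeholders, not the CalcThread from this project):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;

public class WorkQueueSketch {
    public static void main(String[] args) throws InterruptedException {
        final LinkedBlockingQueue<String> todo = new LinkedBlockingQueue<String>();
        for (int i = 0; i < 10; i++) {
            todo.add("pair-" + i); // seed the queue before any worker starts
        }

        final CountDownLatch done = new CountDownLatch(todo.size());
        int threadcount = 4;
        for (int t = 0; t < threadcount; t++) {
            new Thread(new Runnable() {
                public void run() {
                    String item;
                    // poll() returns null once the queue is drained, which ends the worker
                    while ((item = todo.poll()) != null) {
                        System.out.println(Thread.currentThread().getName() + " processed " + item);
                        done.countDown();
                    }
                }
            }).start();
        }

        done.await(); // block until every seeded item has been processed
    }
}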

From source file:org.apache.bookkeeper.metadata.etcd.EtcdLedgerManagerTest.java

@Test
public void testRegisterLedgerMetadataListener() throws Exception {
    long ledgerId = System.currentTimeMillis();

    // create a ledger metadata
    LedgerMetadata metadata = LedgerMetadataBuilder.create().withEnsembleSize(3).withWriteQuorumSize(3)
            .withAckQuorumSize(2).withPassword("test-password".getBytes(UTF_8))
            .withDigestType(DigestType.CRC32C.toApiDigestType()).newEnsembleEntry(0L, createNumBookies(3))
            .build();
    result(lm.createLedgerMetadata(ledgerId, metadata));
    Versioned<LedgerMetadata> readMetadata = lm.readLedgerMetadata(ledgerId).get();
    log.info("Create ledger metadata : {}", readMetadata.getValue());

    // register first listener

    LinkedBlockingQueue<Versioned<LedgerMetadata>> metadataQueue1 = new LinkedBlockingQueue<>();
    LedgerMetadataListener listener1 = (lid, m) -> {
        log.info("[listener1] Received ledger {} metadata : {}", lid, m);
        metadataQueue1.add(m);
    };
    log.info("Registered first listener for ledger {}", ledgerId);
    lm.registerLedgerMetadataListener(ledgerId, listener1);
    // we should receive a metadata notification when a ledger is created
    Versioned<LedgerMetadata> notifiedMetadata = metadataQueue1.take();
    assertEquals(readMetadata, notifiedMetadata);
    ValueStream<LedgerMetadata> lms = lm.getLedgerMetadataStream(ledgerId);
    assertNotNull(lms.waitUntilWatched());
    assertNotNull(result(lms.waitUntilWatched()));

    // register second listener

    LinkedBlockingQueue<Versioned<LedgerMetadata>> metadataQueue2 = new LinkedBlockingQueue<>();
    LedgerMetadataListener listener2 = (lid, m) -> {
        log.info("[listener2] Received ledger {} metadata : {}", lid, m);
        metadataQueue2.add(m);
    };
    log.info("Registered second listener for ledger {}", ledgerId);
    lm.registerLedgerMetadataListener(ledgerId, listener2);
    Versioned<LedgerMetadata> notifiedMetadata2 = metadataQueue2.take();
    assertEquals(readMetadata, notifiedMetadata2);
    assertNotNull(lm.getLedgerMetadataStream(ledgerId));

    // update the metadata
    lm.writeLedgerMetadata(ledgerId,
            LedgerMetadataBuilder.from(metadata).newEnsembleEntry(10L, createNumBookies(3)).build(),
            notifiedMetadata.getVersion()).get();
    readMetadata = lm.readLedgerMetadata(ledgerId).get();
    assertEquals(readMetadata, metadataQueue1.take());
    assertEquals(readMetadata, metadataQueue2.take());
    lms = lm.getLedgerMetadataStream(ledgerId);
    assertNotNull(lms);
    assertEquals(2, lms.getNumConsumers());

    // remove listener2
    lm.unregisterLedgerMetadataListener(ledgerId, listener2);
    lms = lm.getLedgerMetadataStream(ledgerId);
    assertNotNull(lms);
    assertEquals(1, lms.getNumConsumers());

    // update the metadata again
    lm.writeLedgerMetadata(ledgerId,
            LedgerMetadataBuilder.from(metadata).newEnsembleEntry(20L, createNumBookies(3)).build(),
            readMetadata.getVersion()).get();
    readMetadata = lm.readLedgerMetadata(ledgerId).get();
    assertEquals(readMetadata, metadataQueue1.take());
    assertNull(metadataQueue2.poll());

    // remove listener1
    lm.unregisterLedgerMetadataListener(ledgerId, listener1);
    // the value stream will be removed
    while (lm.getLedgerMetadataStream(ledgerId) != null) {
        TimeUnit.MILLISECONDS.sleep(100);
    }
    assertEquals(0, lms.getNumConsumers());

    // update the metadata again
    lm.writeLedgerMetadata(ledgerId,
            LedgerMetadataBuilder.from(metadata).newEnsembleEntry(30L, createNumBookies(3)).build(),
            readMetadata.getVersion()).get();
    readMetadata = lm.readLedgerMetadata(ledgerId).get();
    assertNull(metadataQueue1.poll());
    assertNull(metadataQueue2.poll());

    log.info("Registered first listener for ledger {} again", ledgerId);
    lm.registerLedgerMetadataListener(ledgerId, listener1);
    notifiedMetadata = metadataQueue1.take();
    assertEquals(readMetadata, notifiedMetadata);
    lms = lm.getLedgerMetadataStream(ledgerId);
    assertNotNull(lms);
    assertEquals(1, lms.getNumConsumers());

    // delete the ledger
    lm.removeLedgerMetadata(ledgerId, readMetadata.getVersion()).get();
    // the listener will eventually be removed
    while (lm.getLedgerMetadataStream(ledgerId) != null) {
        TimeUnit.MILLISECONDS.sleep(100);
    }
    assertEquals(1, lms.getNumConsumers());
    assertNull(metadataQueue1.poll());
    assertNull(metadataQueue2.poll());
}

From source file:au.org.ala.spatial.analysis.layers.LayerDistanceIndex.java

/**
 * @param threadcount    number of threads to run analysis.
 * @param onlyThesePairs array of distances to run as fieldId1 + " " +
 *                       fieldId2 where fieldId1.compareTo(fieldId2) &lt; 0, or null for all missing
 *                       distances.
 * @throws InterruptedException
 */
public void occurrencesUpdate(int threadcount, String[] onlyThesePairs) throws InterruptedException {

    //create distances file if it does not exist.
    File layerDistancesFile = new File(IntersectConfig.getAlaspatialOutputPath() + LAYER_DISTANCE_FILE);
    if (!layerDistancesFile.exists()) {
        FileWriter fw = null;
        try {
            fw = new FileWriter(layerDistancesFile);
            fw.flush();
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        } finally {
            if (fw != null) {
                try {
                    fw.close();
                } catch (Exception e) {
                    logger.error(e.getMessage(), e);
                }
            }
        }
    }

    Map<String, Double> map = loadDistances();

    LinkedBlockingQueue<String> todo = new LinkedBlockingQueue<String>();

    if (onlyThesePairs != null && onlyThesePairs.length > 0) {
        for (String s : onlyThesePairs) {
            todo.add(s);
        }
    } else {
        //find all environmental layer analysis files
        File root = new File(IntersectConfig.getAlaspatialOutputPath());
        File[] dirs = root.listFiles(new FileFilter() {

            @Override
            public boolean accept(File pathname) {
                return pathname != null && pathname.isDirectory();
            }
        });

        HashMap<String, String> domains = new HashMap<String, String>();
        for (File dir : dirs) {
            //iterate through files so we get everything
            File[] files = new File(dir.getPath()).listFiles(new FileFilter() {

                @Override
                public boolean accept(File pathname) {
                    return pathname.getName().endsWith(".grd") && pathname.getName().startsWith("el");
                }
            });

            for (int i = 0; i < files.length; i++) {
                for (int j = i + 1; j < files.length; j++) {
                    String file1 = files[i].getName().replace(".grd", "");
                    String file2 = files[j].getName().replace(".grd", "");

                    //only operate on file names that are valid fields
                    if (Client.getFieldDao().getFieldById(file1) != null
                            && Client.getFieldDao().getFieldById(file2) != null) {

                        String domain1 = domains.get(file1);
                        if (domain1 == null) {
                            String pid1 = Client.getFieldDao().getFieldById(file1).getSpid();
                            domain1 = Client.getLayerDao().getLayerById(Integer.parseInt(pid1)).getdomain();
                            domains.put(file1, domain1);
                        }
                        String domain2 = domains.get(file2);
                        if (domain2 == null) {
                            String pid2 = Client.getFieldDao().getFieldById(file2).getSpid();
                            domain2 = Client.getLayerDao().getLayerById(Integer.parseInt(pid2)).getdomain();
                            domains.put(file2, domain2);
                        }

                        String key = (file1.compareTo(file2) < 0) ? file1 + " " + file2 : file2 + " " + file1;

                        //domain test
                        if (isSameDomain(parseDomain(domain1), parseDomain(domain2))) {
                            if (!map.containsKey(key) && !todo.contains(key)) {
                                todo.put(key);
                            }
                        }
                    }
                }
            }
        }
    }

    LinkedBlockingQueue<String> toDisk = new LinkedBlockingQueue<String>();
    CountDownLatch cdl = new CountDownLatch(todo.size());
    CalcThread[] threads = new CalcThread[threadcount];
    for (int i = 0; i < threadcount; i++) {
        threads[i] = new CalcThread(cdl, todo, toDisk);
        threads[i].start();
    }

    ToDiskThread toDiskThread = new ToDiskThread(
            IntersectConfig.getAlaspatialOutputPath() + LAYER_DISTANCE_FILE, toDisk);
    toDiskThread.start();

    cdl.await();

    for (int i = 0; i < threadcount; i++) {
        threads[i].interrupt();
    }

    toDiskThread.interrupt();
}