Example usage for com.google.common.util.concurrent ListenableFuture get

List of usage examples for com.google.common.util.concurrent ListenableFuture get

Introduction

On this page you can find example usages of com.google.common.util.concurrent ListenableFuture get.

Prototype

V get() throws InterruptedException, ExecutionException;

Source Link

Document

Waits if necessary for the computation to complete, and then retrieves its result.

Usage

From source file:io.geobit.chain.dispatchers.BalanceAndReceivedDispatcher.java

/**
 * Resolves the total "received" amount for an address by racing two
 * independent providers concurrently and cross-checking their answers
 * against two caches (a short-lived "recent" cache and a longer-lived one).
 * Retries via a recursive fallback, giving up after 5 attempts.
 *
 * @param address the address to query
 * @param cont    retry counter; callers start at 0
 * @return the received amount, or {@code null} once retries are exhausted
 */
public Long getReceived(String address, int cont) {
    if (cont > 5)
        return null;
    // Fast path: a recently confirmed value.
    Long valCache = recentCache.getIfPresent("r/" + address);
    if (valCache != null)
        return valCache;
    // Older cached value; used below only to corroborate a fresh provider answer.
    valCache = cache.getIfPresent("r/" + address);
    // Pick two distinct providers so their answers are independent.
    ReceivedProvider rec1 = receivedProviders.take();
    ReceivedProvider rec2 = receivedProviders.takeDifferent(rec1);
    log("rec1=" + rec1 + " rec2=" + rec2);

    Callable<Long> runner1 = new ReceivedRunnable(rec1, address);
    Callable<Long> runner2 = new ReceivedRunnable(rec2, address);
    final Long start = System.currentTimeMillis();
    ListenableFuture<Long> listenableFuture1 = moreExecutor.submit(runner1);
    ListenableFuture<Long> listenableFuture2 = moreExecutor.submit(runner2);
    // "returned" is completed by whichever provider answers first.
    SettableFuture<Long> returned = SettableFuture.create();
    Futures.addCallback(listenableFuture1,
            new ReceivedFutureCallback(start, rec1, returned, receivedProviders));
    Futures.addCallback(listenableFuture2,
            new ReceivedFutureCallback(start, rec2, returned, receivedProviders));
    // Background task that compares both answers and maintains the cache.
    Runnable checker = new ReceivedCheckRunnable(address, listenableFuture1, rec1, listenableFuture2, rec2,
            receivedProviders, cache);
    moreExecutor.execute(checker);

    Long valRet;
    try {
        valRet = returned.get(); /* return the faster */
        // Fastest answer agrees with the cached value -> trust it immediately.
        if (valCache != null && valCache.equals(valRet)) {
            recentCache.put("r/" + address, valRet);
            return valRet;
        }

        // Otherwise wait for both providers and require them to agree.
        Long first = listenableFuture1.get();
        Long second = listenableFuture2.get();
        if (first != null && first.equals(second)) {
            cache.put("r/" + address, first);
            recentCache.put("r/" + address, first);
            return first;
        }
        // Providers disagree: remember whichever answered, then fall through to retry.
        if (first != null)
            cache.put("r/" + address, first);
        else if (second != null)
            cache.put("r/" + address, second);

    } catch (Exception e) {
        error("BalanceAndReceivedDispatcher getReceived " + e.getMessage());
    }
    // NOTE(review): the fallback calls getBalance rather than getReceived even
    // though this method answers a "received" query - confirm this is intended.
    return getBalance(address, cont + 1);
}

From source file:me.lucko.luckperms.commands.migration.subcommands.MigrationPowerfulPerms.java

/**
 * Runs the PowerfulPerms -> LuckPerms migration.
 * <p>
 * Reads the set of player UUIDs directly from the PowerfulPerms MySQL table,
 * migrates all groups (own permissions + parent inheritance), then migrates
 * each user's permissions and group memberships asynchronously via the
 * PowerfulPerms API, and finally blocks until every user migration finishes.
 *
 * @param plugin the LuckPerms plugin instance
 * @param args   positional args: host:port, database, username, password, table
 * @return SUCCESS on completion, FAILURE on data errors, STATE_ERROR when
 *         PowerfulPerms is not loaded
 */
private CommandResult run(LuckPermsPlugin plugin, List<String> args) {
    final Logger log = plugin.getLog();
    if (!plugin.isPluginLoaded("PowerfulPerms")) {
        log.severe("PowerfulPerms Migration: Error -> PowerfulPerms is not loaded.");
        return CommandResult.STATE_ERROR;
    }

    final String address = args.get(0);
    final String database = args.get(1);
    final String username = args.get(2);
    final String password = args.get(3);
    final String dbTable = args.get(4);

    // Find a list of UUIDs
    log.info("PowerfulPerms Migration: Getting a list of UUIDs to migrate.");

    // Lombok's @Cleanup closes these resources automatically at end of scope.
    @Cleanup
    HikariDataSource hikari = new HikariDataSource();
    hikari.setMaximumPoolSize(2);
    hikari.setDataSourceClassName("com.mysql.jdbc.jdbc2.optional.MysqlDataSource");
    hikari.addDataSourceProperty("serverName", address.split(":")[0]);
    hikari.addDataSourceProperty("port", address.split(":")[1]);
    hikari.addDataSourceProperty("databaseName", database);
    hikari.addDataSourceProperty("user", username);
    hikari.addDataSourceProperty("password", password);

    Set<UUID> uuids = new HashSet<>();

    try {
        @Cleanup
        Connection connection = hikari.getConnection();
        DatabaseMetaData meta = connection.getMetaData();

        @Cleanup
        ResultSet tables = meta.getTables(null, null, dbTable, null);
        if (!tables.next()) {
            log.severe("PowerfulPerms Migration: Error - Couldn't find table.");
            return CommandResult.FAILURE;

        } else {
            // Log the table's column layout, purely for diagnostics.
            @Cleanup
            PreparedStatement columnPs = connection.prepareStatement(
                    "SELECT COLUMN_NAME, COLUMN_TYPE FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME=?");
            columnPs.setString(1, dbTable);
            @Cleanup
            ResultSet columnRs = columnPs.executeQuery();

            log.info("Found table: " + dbTable);
            while (columnRs.next()) {
                log.info("" + columnRs.getString("COLUMN_NAME") + " - " + columnRs.getString("COLUMN_TYPE"));
            }

            // NOTE(review): the table name is concatenated into the SQL; it
            // comes from the command line (operator input), not end users.
            @Cleanup
            PreparedStatement preparedStatement = connection.prepareStatement("SELECT `uuid` FROM " + dbTable);
            @Cleanup
            ResultSet resultSet = preparedStatement.executeQuery();

            while (resultSet.next()) {
                uuids.add(UUID.fromString(resultSet.getString("uuid")));
            }
        }

    } catch (Exception e) {
        e.printStackTrace();
        return CommandResult.FAILURE;
    }

    if (uuids.isEmpty()) {
        log.severe("PowerfulPerms Migration: Error - Unable to find any UUIDs to migrate.");
        return CommandResult.FAILURE;
    }

    log.info("PowerfulPerms Migration: Found " + uuids.size() + " uuids. Starting migration.");

    PowerfulPermsPlugin ppPlugin = (PowerfulPermsPlugin) plugin.getPlugin("PowerfulPerms");
    PermissionManager pm = ppPlugin.getPermissionManager();

    // Groups first.
    log.info("PowerfulPerms Migration: Starting group migration.");
    Map<Integer, Group> groups = pm.getGroups(); // All versions
    for (Group g : groups.values()) {
        plugin.getDatastore().createAndLoadGroup(g.getName().toLowerCase());
        final me.lucko.luckperms.groups.Group group = plugin.getGroupManager().get(g.getName().toLowerCase());
        try {
            LogEntry.build().actor(Constants.getConsoleUUID()).actorName(Constants.getConsoleName())
                    .acted(group).action("create").build().submit(plugin);
        } catch (Exception ex) {
            ex.printStackTrace();
        }

        for (Permission p : g.getOwnPermissions()) { // All versions
            applyPerm(group, p, plugin);
        }

        // PP parent groups are modelled as LuckPerms "group.<name>" nodes.
        for (Group parent : g.getParents()) { // All versions
            try {
                group.setPermission("group." + parent.getName().toLowerCase(), true);
                LogEntry.build().actor(Constants.getConsoleUUID()).actorName(Constants.getConsoleName())
                        .acted(group).action("setinherit " + parent.getName().toLowerCase()) // All versions
                        .build().submit(plugin);
            } catch (Exception ex) {
                // Already inheriting this group is expected; anything else is logged.
                if (!(ex instanceof ObjectAlreadyHasException)) {
                    ex.printStackTrace();
                }
            }
        }

        plugin.getDatastore().saveGroup(group);
    }
    log.info("PowerfulPerms Migration: Group migration complete.");

    // Now users.
    log.info("PowerfulPerms Migration: Starting user migration.");
    // Each user has two async migration steps (permissions + groups);
    // the latch counts down once per completed step.
    final Map<UUID, CountDownLatch> progress = new HashMap<>();

    // Migrate all users and their groups
    for (UUID uuid : uuids) {
        progress.put(uuid, new CountDownLatch(2));

        // Create a LuckPerms user for the UUID
        plugin.getDatastore().loadUser(uuid, "null");
        User user = plugin.getUserManager().get(uuid);

        // Get a list of Permissions held by the user from the PP API.
        getPlayerPermissions(pm, uuid, perms -> { // Changes each version
            perms.forEach(p -> applyPerm(user, p, plugin));

            // Update the progress so the user can be saved and unloaded.
            synchronized (progress) {
                progress.get(uuid).countDown();
                if (progress.get(uuid).getCount() == 0) {
                    plugin.getDatastore().saveUser(user);
                    plugin.getUserManager().cleanup(user);
                }
            }
        });

        // Migrate the user's groups to LuckPerms from PP.
        Callback<Map<String, List<CachedGroup>>> callback = groups1 -> {
            for (Map.Entry<String, List<CachedGroup>> e : groups1.entrySet()) {
                // An empty key or "all" means the membership is global (no server context).
                final String server;
                if (e.getKey() != null && (e.getKey().equals("") || e.getKey().equalsIgnoreCase("all"))) {
                    server = null;
                } else {
                    server = e.getKey();
                }

                if (superLegacy) {
                    // Very old PP versions only expose the group via reflection.
                    e.getValue().stream().filter(cg -> !cg.isNegated()).map(cg -> {
                        try {
                            return (Group) getGroupMethod.invoke(cg);
                        } catch (IllegalAccessException | InvocationTargetException e1) {
                            e1.printStackTrace();
                            return null;
                        }
                    }).forEach(g -> {
                        if (g != null) {
                            if (server == null) {
                                try {
                                    user.setPermission("group." + g.getName().toLowerCase(), true);
                                    LogEntry.build().actor(Constants.getConsoleUUID())
                                            .actorName(Constants.getConsoleName()).acted(user)
                                            .action("addgroup " + g.getName().toLowerCase()).build()
                                            .submit(plugin);
                                } catch (Exception ex) {
                                    if (!(ex instanceof ObjectAlreadyHasException)) {
                                        ex.printStackTrace();
                                    }
                                }
                            } else {
                                try {
                                    user.setPermission("group." + g.getName().toLowerCase(), true, server);
                                    LogEntry.build().actor(Constants.getConsoleUUID())
                                            .actorName(Constants.getConsoleName()).acted(user)
                                            .action("addgroup " + g.getName().toLowerCase() + " " + server)
                                            .build().submit(plugin);
                                } catch (Exception ex) {
                                    if (!(ex instanceof ObjectAlreadyHasException)) {
                                        ex.printStackTrace();
                                    }
                                }
                            }
                        }
                    });
                } else {
                    // Modern PP: skip expired/negated memberships; temporary
                    // memberships become LuckPerms temp groups (expiry in epoch seconds).
                    e.getValue().stream().filter(g -> !g.hasExpired() && !g.isNegated()).forEach(g -> {
                        final Group group = pm.getGroup(g.getGroupId());
                        if (g.willExpire()) {
                            if (server == null) {
                                try {
                                    user.setPermission("group." + group.getName().toLowerCase(), true,
                                            g.getExpirationDate().getTime() / 1000L);
                                    LogEntry.build().actor(Constants.getConsoleUUID())
                                            .actorName(Constants.getConsoleName()).acted(user)
                                            .action("addtempgroup " + group.getName().toLowerCase() + " "
                                                    + g.getExpirationDate().getTime() / 1000L)
                                            .build().submit(plugin);
                                } catch (Exception ex) {
                                    if (!(ex instanceof ObjectAlreadyHasException)) {
                                        ex.printStackTrace();
                                    }
                                }
                            } else {
                                try {
                                    user.setPermission("group." + group.getName().toLowerCase(), true, server,
                                            g.getExpirationDate().getTime() / 1000L);
                                    LogEntry.build().actor(Constants.getConsoleUUID())
                                            .actorName(Constants.getConsoleName()).acted(user)
                                            .action("addtempgroup " + group.getName().toLowerCase() + " "
                                                    + g.getExpirationDate().getTime() / 1000L + " " + server)
                                            .build().submit(plugin);
                                } catch (Exception ex) {
                                    if (!(ex instanceof ObjectAlreadyHasException)) {
                                        ex.printStackTrace();
                                    }
                                }
                            }

                        } else {
                            if (server == null) {
                                try {
                                    user.setPermission("group." + group.getName().toLowerCase(), true);
                                    LogEntry.build().actor(Constants.getConsoleUUID())
                                            .actorName(Constants.getConsoleName()).acted(user)
                                            .action("addgroup " + group.getName().toLowerCase()).build()
                                            .submit(plugin);
                                } catch (Exception ex) {
                                    if (!(ex instanceof ObjectAlreadyHasException)) {
                                        ex.printStackTrace();
                                    }
                                }
                            } else {
                                try {
                                    user.setPermission("group." + group.getName().toLowerCase(), true, server);
                                    LogEntry.build().actor(Constants.getConsoleUUID())
                                            .actorName(Constants.getConsoleName()).acted(user)
                                            .action("addgroup " + group.getName().toLowerCase() + " " + server)
                                            .build().submit(plugin);
                                } catch (Exception ex) {
                                    if (!(ex instanceof ObjectAlreadyHasException)) {
                                        ex.printStackTrace();
                                    }
                                }
                            }
                        }
                    });
                }
            }

            // Update the progress so the user can be saved and unloaded.
            synchronized (progress) {
                progress.get(uuid).countDown();
                if (progress.get(uuid).getCount() == 0) {
                    plugin.getDatastore().saveUser(user);
                    plugin.getUserManager().cleanup(user);
                }
            }
        };

        if (!legacy) {
            // Modern PP returns a ListenableFuture from getPlayerGroups (via reflection).
            try {
                ListenableFuture<LinkedHashMap<String, List<CachedGroup>>> future = (ListenableFuture<LinkedHashMap<String, List<CachedGroup>>>) getPlayerGroupsMethod
                        .invoke(pm, uuid);
                try {
                    if (future.isDone()) {
                        callback.onComplete(future.get());
                    } else {
                        future.addListener(() -> {
                            try {
                                callback.onComplete(future.get());
                            } catch (InterruptedException | ExecutionException e) {
                                e.printStackTrace();
                            }
                        }, Runnable::run);
                    }
                } catch (InterruptedException | ExecutionException e) {
                    e.printStackTrace();
                }
            } catch (IllegalAccessException | InvocationTargetException e) {
                log.info("PowerfulPerms Migration: Error");
                e.printStackTrace();
            }
        } else {
            // Legacy PP takes a result-runnable callback instead of returning a future.
            try {
                getPlayerGroupsMethod.invoke(pm, uuid,
                        new LPResultRunnable<LinkedHashMap<String, List<CachedGroup>>>() {
                            @Override
                            public void run() {
                                callback.onComplete(getResult());
                            }
                        });
            } catch (IllegalAccessException | InvocationTargetException e) {
                log.info("PowerfulPerms Migration: Error");
                e.printStackTrace();
            }
        }
    }

    // All groups are migrated, but there may still be some users being migrated.
    // This block will wait for all users to be completed.
    // Polls every 5 seconds until every per-user latch has reached zero.
    // NOTE(review): InterruptedException here is logged but not re-asserted.
    log.info("PowerfulPerms Migration: Waiting for user migration to complete. This may take some time");
    boolean sleep = true;
    while (sleep) {
        sleep = false;

        for (Map.Entry<UUID, CountDownLatch> e : progress.entrySet()) {
            if (e.getValue().getCount() != 0) {
                sleep = true;
                break;
            }
        }

        if (sleep) {
            try {
                Thread.sleep(5000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }

    }

    // We done.
    log.info("PowerfulPerms Migration: Success! Completed without any errors.");
    return CommandResult.SUCCESS;
}

From source file:io.druid.segment.realtime.appenderator.AppenderatorImpl.java

@Override
public void clear() throws InterruptedException {
    // Drop the commit metadata first, then abandon every segment.
    try {
        // Persist an empty ("nil") commit record so the on-disk metadata no
        // longer references any of the segments we are about to drop.
        final ListenableFuture<?> metadataWipe = persistExecutor.submit(new Callable<Object>() {
            @Override
            public Object call() throws Exception {
                objectMapper.writeValue(computeCommitFile(), Committed.nil());
                return null;
            }
        });

        // Block until the uncommit has been written out.
        metadataWipe.get();

        // Abandon every sink, collecting the futures so we can wait on all of them.
        final List<ListenableFuture<?>> abandonFutures = Lists.newArrayList();
        for (Map.Entry<SegmentIdentifier, Sink> sinkEntry : sinks.entrySet()) {
            abandonFutures.add(abandonSegment(sinkEntry.getKey(), sinkEntry.getValue(), true));
        }

        // Block until every segment has been dropped.
        Futures.allAsList(abandonFutures).get();
    } catch (ExecutionException e) {
        throw Throwables.propagate(e);
    }
}

From source file:io.druid.query.metadata.SegmentMetadataQueryRunnerFactory.java

@Override
public QueryRunner<SegmentAnalysis> mergeRunners(ExecutorService exec,
        Iterable<QueryRunner<SegmentAnalysis>> queryRunners) {
    // Decorate the executor so submitted tasks return ListenableFutures.
    final ListeningExecutorService queryExecutor = MoreExecutors.listeningDecorator(exec);
    return new ConcatQueryRunner<SegmentAnalysis>(Sequences.map(Sequences.simple(queryRunners),
            new Function<QueryRunner<SegmentAnalysis>, QueryRunner<SegmentAnalysis>>() {
                @Override
                public QueryRunner<SegmentAnalysis> apply(final QueryRunner<SegmentAnalysis> input) {
                    return new QueryRunner<SegmentAnalysis>() {
                        @Override
                        public Sequence<SegmentAnalysis> run(final Query<SegmentAnalysis> query,
                                final Map<String, Object> responseContext) {
                            final int priority = query.getContextPriority(0);
                            // Run the delegate runner on the executor at the query's
                            // priority, materializing the whole sequence inside the task.
                            ListenableFuture<Sequence<SegmentAnalysis>> future = queryExecutor.submit(
                                    new AbstractPrioritizedCallable<Sequence<SegmentAnalysis>>(priority) {
                                        @Override
                                        public Sequence<SegmentAnalysis> call() throws Exception {
                                            return Sequences
                                                    .simple(Sequences.toList(input.run(query, responseContext),
                                                            new ArrayList<SegmentAnalysis>()));
                                        }
                                    });
                            try {
                                // Register for external cancellation, then block,
                                // honoring the optional per-query timeout.
                                queryWatcher.registerQuery(query, future);
                                final Number timeout = query.getContextValue(QueryContextKeys.TIMEOUT,
                                        (Number) null);
                                return timeout == null ? future.get()
                                        : future.get(timeout.longValue(), TimeUnit.MILLISECONDS);
                            } catch (InterruptedException e) {
                                log.warn(e, "Query interrupted, cancelling pending results, query id [%s]",
                                        query.getId());
                                future.cancel(true);
                                throw new QueryInterruptedException("Query interrupted");
                            } catch (CancellationException e) {
                                throw new QueryInterruptedException("Query cancelled");
                            } catch (TimeoutException e) {
                                log.info("Query timeout, cancelling pending results for query id [%s]",
                                        query.getId());
                                future.cancel(true);
                                throw new QueryInterruptedException("Query timeout");
                            } catch (ExecutionException e) {
                                // Unwrap and rethrow the task's own failure cause.
                                throw Throwables.propagate(e.getCause());
                            }
                        }
                    };
                }
            }));
}

From source file:org.waveprotocol.box.server.waveletstate.block.BlockWaveletStateImpl.java

/**
 * Reads the requested blocks, serving what it can from the block cache and
 * in-flight read futures, and issuing a single batched read for the rest.
 *
 * @param blockIds ids of the blocks to resolve
 * @return a future completing with a map of block id to block once every
 *         requested id is resolved; fails if any individual read fails
 * @throws WaveletStateException if the state is not open
 */
@Override
public ListenableFuture<Map<String, Block>> readBlocks(final Set<String> blockIds)
        throws WaveletStateException {
    checkOpened();
    // Concurrent: listener callbacks below may run on arbitrary threads.
    final Map<String, Block> blocks = new ConcurrentHashMap<>();
    Set<String> missingBlockIds = CollectionUtils.newHashSet();
    List<ListenableFuture<Block>> blockFutures = CollectionUtils.newLinkedList();
    for (String blockId : blockIds) {
        Block block = blockCache.getBlock(blockId);
        if (block != null) {
            // Cache hit - no fetch needed.
            blocks.put(block.getBlockId(), block);
        } else {
            // Piggy-back on an in-flight read for this block if one exists.
            ListenableFuture<Block> blockFuture = readBlocksFutures.get(blockId);
            if (blockFuture != null) {
                blockFutures.add(blockFuture);
            } else {
                missingBlockIds.add(blockId);
            }
        }
    }
    if (!missingBlockIds.isEmpty()) {
        blockFutures.addAll(executeReadBlocksRequest(missingBlockIds));
    }
    final SettableFuture<Map<String, Block>> future = SettableFuture.create();
    if (!blockFutures.isEmpty()) {
        for (final ListenableFuture<Block> blockFuture : blockFutures) {
            blockFuture.addListener(new Runnable() {

                @Override
                public void run() {
                    try {
                        // Fetch the result once instead of calling get() twice;
                        // the future is already complete when the listener fires.
                        Block loaded = blockFuture.get();
                        blocks.put(loaded.getBlockId(), loaded);
                        // Complete the aggregate future once every id is resolved.
                        if (blocks.keySet().containsAll(blockIds)) {
                            future.set(blocks);
                        }
                    } catch (InterruptedException | ExecutionException ex) {
                        future.setException(ex);
                    }
                }
            }, MoreExecutors.sameThreadExecutor());
        }
    } else {
        // Everything was served from the cache.
        future.set(blocks);
    }
    return future;
}

From source file:com.ibm.stocator.fs.cos.COSBlockOutputStream.java

/**
 * Upload the current block as a single PUT request; if the buffer is empty a
 * 0-byte PUT will be invoked, as it is needed to create an entry at the far
 * end.
 *
 * @throws IOException any problem
 */
private void putObject() throws IOException {
    LOG.debug("Executing regular upload for {}", writeOperationHelper);

    final COSDataBlocks.DataBlock block = getActiveBlock();
    int size = block.dataSize();
    final COSDataBlocks.BlockUploadData uploadData = block.startUpload();
    // Upload from a file when the block is disk-backed, otherwise stream from memory.
    final PutObjectRequest putObjectRequest = uploadData.hasFile()
            ? writeOperationHelper.newPutRequest(uploadData.getFile())
            : writeOperationHelper.newPutRequest(uploadData.getUploadStream(), size);

    final ObjectMetadata om = new ObjectMetadata();
    om.setUserMetadata(mMetadata);
    if (contentType != null && !contentType.isEmpty()) {
        om.setContentType(contentType);
    } else {
        // Fall back to a generic binary content type when none was configured.
        om.setContentType("application/octet-stream");
    }
    putObjectRequest.setMetadata(om);
    ListenableFuture<PutObjectResult> putObjectResult = executorService.submit(new Callable<PutObjectResult>() {
        @Override
        public PutObjectResult call() throws Exception {
            PutObjectResult result;
            try {
                // the putObject call automatically closes the input
                // stream afterwards.
                result = writeOperationHelper.putObject(putObjectRequest);
            } finally {
                // Always release the block and its upload data, even on failure.
                closeAll(LOG, uploadData, block);
            }
            return result;
        }
    });
    clearActiveBlock();
    // wait for completion
    try {
        putObjectResult.get();
    } catch (InterruptedException ie) {
        LOG.warn("Interrupted object upload", ie);
        Thread.currentThread().interrupt();
    } catch (ExecutionException ee) {
        // Unwrap the task failure into an IOException for the caller.
        throw extractException("regular upload", key, ee);
    }
}

From source file:org.thingsboard.server.dao.timeseries.CassandraBaseTimeseriesDao.java

/**
 * Removes the "latest" ts_kv entry for the given key when its timestamp falls
 * inside the query's delete window, optionally rewriting the latest value
 * afterwards when {@code rewriteLatestIfDeleted} is set.
 *
 * @return a future completing when the removal (and optional rewrite) is done.
 *         Unlike the previous revision, the future is now always completed -
 *         on failure it completes exceptionally instead of hanging callers.
 */
@Override
public ListenableFuture<Void> removeLatest(TenantId tenantId, EntityId entityId, DeleteTsKvQuery query) {
    ListenableFuture<TsKvEntry> latestEntryFuture = findLatest(tenantId, entityId, query.getKey());

    // Decide whether the current latest entry falls inside the delete window.
    ListenableFuture<Boolean> booleanFuture = Futures.transform(latestEntryFuture, latestEntry -> {
        long ts = latestEntry.getTs();
        if (ts > query.getStartTs() && ts <= query.getEndTs()) {
            return true;
        } else {
            log.trace("Won't be deleted latest value for [{}], key - {}", entityId, query.getKey());
        }
        return false;
    }, readResultsProcessingExecutor);

    ListenableFuture<Void> removedLatestFuture = Futures.transformAsync(booleanFuture, isRemove -> {
        if (isRemove) {
            return deleteLatest(tenantId, entityId, query.getKey());
        }
        return Futures.immediateFuture(null);
    }, readResultsProcessingExecutor);

    final SimpleListenableFuture<Void> resultFuture = new SimpleListenableFuture<>();
    Futures.addCallback(removedLatestFuture, new FutureCallback<Void>() {
        @Override
        public void onSuccess(@Nullable Void result) {
            if (query.getRewriteLatestIfDeleted()) {
                ListenableFuture<Void> savedLatestFuture = Futures.transformAsync(booleanFuture, isRemove -> {
                    if (isRemove) {
                        return getNewLatestEntryFuture(tenantId, entityId, query);
                    }
                    return Futures.immediateFuture(null);
                }, readResultsProcessingExecutor);

                try {
                    resultFuture.set(savedLatestFuture.get());
                } catch (InterruptedException e) {
                    // Restore the interrupt flag and fail the result instead of
                    // leaving the returned future forever pending.
                    Thread.currentThread().interrupt();
                    log.warn("Could not get latest saved value for [{}], {}", entityId, query.getKey(), e);
                    resultFuture.setException(e);
                } catch (ExecutionException e) {
                    log.warn("Could not get latest saved value for [{}], {}", entityId, query.getKey(), e);
                    resultFuture.setException(e);
                }
            } else {
                resultFuture.set(null);
            }
        }

        @Override
        public void onFailure(Throwable t) {
            log.warn("[{}] Failed to process remove of the latest value", entityId, t);
            // Fix: previously the result future was never completed here, so
            // callers blocking on it would hang forever on failure.
            // (Assumes SimpleListenableFuture supports setException like its
            // use of set() suggests - TODO confirm.)
            resultFuture.setException(t);
        }
    });
    return resultFuture;
}

From source file:de.ii.xtraplatform.ogc.api.gml.parser.GMLParser.java

/**
 * Parses a WFS GetFeature response for the given feature type.
 * <p>
 * Transforms the pending HTTP entity into a StAX root cursor and hands it to
 * {@code parseRoot}. The entity is always consumed in the finally block so the
 * underlying HTTP connection can be released/reused.
 *
 * @param entity future resolving to the GetFeature HTTP response entity
 * @param ns     feature type namespace
 * @param ft     feature type local name
 * @throws ExecutionException if resolving the entity fails
 */
public void parse(ListenableFuture<HttpEntity> entity, String ns, String ft) throws ExecutionException {

    LOGGER.debug("Parsing GetFeature response for '{}'", ft);
    try {

        ListenableFuture<SMInputCursor> rootFuture = Futures.transform(entity,
                new Function<HttpEntity, SMInputCursor>() {
                    @Override
                    public SMInputCursor apply(HttpEntity e) {
                        try {
                            return staxFactory.rootElementCursor(e.getContent()).advance();
                        } catch (IOException | IllegalStateException | XMLStreamException ex) {
                            // Map parse failures to null; downstream handles the absent cursor.
                            LOGGER.debug("Error parsing WFS GetFeature (IOException) {}", ex.getMessage());
                            return null;
                        }
                    }
                });

        parseRoot(rootFuture, ns, ft);
    } finally {
        try {
            // Drain the entity so the connection can go back to the pool.
            EntityUtils.consumeQuietly(entity.get());
        } catch (InterruptedException ex) {
            // Fix: restore the interrupt status instead of silently swallowing it.
            Thread.currentThread().interrupt();
        }
    }
}

From source file:org.opendaylight.centinel.impl.CentinelStreamImpl.java

/**
 * Looks up a stream by id in the operational data store and returns its
 * details.
 *
 * @param input carries the stream id to look up
 * @return a future with the stream details on success; an immediately failed
 *         future on validation/lookup errors. The future is now always
 *         completed - previously an unexpected exception left it pending
 *         forever.
 */
@Override
public Future<RpcResult<GetStreamOutput>> getStream(GetStreamInput input) {
    final ReadWriteTransaction tx = dataProvider.newReadWriteTransaction();
    final SettableFuture<RpcResult<GetStreamOutput>> futureResult = SettableFuture.create();
    boolean idMatches = false;
    // Validate the stream id before touching the data store.
    if (input.getStreamID() == null || input.getStreamID().isEmpty() || input.getStreamID().trim().isEmpty()) {
        LOG.debug("STREAM ID CANNOT BE NULL");
        return Futures.immediateFailedCheckedFuture(
                new TransactionCommitFailedException("invalid-input", streamIdcannotbenullError()));
    }
    final GetStreamOutputBuilder getStreamOutputBuilder = new GetStreamOutputBuilder();
    ListenableFuture<Optional<StreamRecord>> streamRuleReadFuture = tx.read(LogicalDatastoreType.OPERATIONAL,
            streamRecordId);
    try {
        Optional<StreamRecord> streamRecord = streamRuleReadFuture.get();
        List<StreamList> streamList = new ArrayList<StreamList>();
        if (streamRecord.isPresent()) {
            streamList = streamRecord.get().getStreamList();
        } else {
            return Futures.immediateFailedCheckedFuture(new TransactionCommitFailedException("invalid-input",
                    RpcResultBuilder.newError(ErrorType.APPLICATION, "invalid-input",
                            "Record is not present in operational data store")));
        }
        if (streamList.isEmpty()) {
            return Futures.immediateFailedCheckedFuture(
                    new TransactionCommitFailedException("invalid-input", RpcResultBuilder
                            .newError(ErrorType.APPLICATION, "invalid-input", "NO stream in datastore")));
        } else {
            java.util.Iterator<StreamList> iterator = streamList.iterator();

            // Scan for the entry matching the requested stream id.
            while (iterator.hasNext()) {
                StreamList streamListObj = iterator.next();
                if (streamListObj.getStreamID().equals(input.getStreamID())) {
                    idMatches = true;
                    getStreamOutputBuilder.setConfigID(streamListObj.getConfigID())
                            .setContentPack(streamListObj.getContentPack())
                            .setDescription(streamListObj.getDescription())
                            .setNodeType(streamListObj.getNodeType()).setRuleID(streamListObj.getRuleID())
                            .setRuleTypeClassifier(streamListObj.getRuleTypeClassifier())
                            .setStreamID(streamListObj.getStreamID()).setTimeStamp(streamListObj.getTimeStamp())
                            .setTitle(streamListObj.getTitle());

                    // Copy over the stream's rules, if any.
                    if (!streamListObj.getStreamRules().isEmpty()) {
                        List<StreamRules> streamRule = new ArrayList<StreamRules>(streamListObj.getStreamRules());
                        getStreamOutputBuilder.setStreamRules(streamRule);
                    }
                }
            }
            if (!idMatches) {
                return Futures.immediateFailedCheckedFuture(new TransactionCommitFailedException(
                        "invalid-input", RpcResultBuilder.newError(ErrorType.APPLICATION, "invalid-input",
                                "Invalid Stream id or The stream is not present in operational data store")));
            }
            futureResult.set(RpcResultBuilder.<GetStreamOutput>success(getStreamOutputBuilder.build()).build());
        }
    } catch (Exception ex) {
        LOG.error("Exception occured while getting record from operational data store", ex);
        // Fix: complete the future exceptionally so callers are not left
        // waiting forever on an unset SettableFuture.
        futureResult.setException(ex);
    }
    return futureResult;
}

From source file:org.opendaylight.topomanager.impl.BierTopologyManager.java

/**
 * Reads a BIER node from the data store via the topology processor queue.
 *
 * @param topologyId topology containing the node
 * @param nodeId     id of the node to read
 * @return the node, or {@code null} when it is absent or the read fails
 */
public BierNode getNodeData(String topologyId, String nodeId) {
    BierTopologyProcess<BierNode> processor = new BierTopologyProcess<BierNode>(dataBroker,
            BierTopologyProcess.FLAG_READ, (new BierNodeBuilder()).build());
    final InstanceIdentifier<BierNode> path = getNodePath(topologyId, nodeId);

    processor.enqueueOperation(new BierTopologyOperation() {
        @Override
        public void writeOperation(ReadWriteTransaction transaction) {
            // Read-only operation: nothing to write.
        }

        @SuppressWarnings("unchecked")
        @Override
        public ListenableFuture<Optional<BierNode>> readOperation(ReadWriteTransaction transaction) {
            ListenableFuture<Optional<BierNode>> listenableFuture = transaction.read(datastoreType, path);
            return listenableFuture;
        }
    });

    Future<ListenableFuture<BierNode>> future = EXECUTOR.submit(processor);

    try {
        ListenableFuture<BierNode> result = future.get();
        BierNode node = result.get();
        // The processor hands back a default-built node on a miss, so a null
        // node id also counts as "not found".
        if (null == node || null == node.getNodeId()) {
            LOG.error("Get bier node failed!");
            return null;
        }
        return node;
    } catch (InterruptedException e) {
        // Fix: restore the interrupt flag so callers can observe the interruption.
        Thread.currentThread().interrupt();
        LOG.error("Get bier node is interrupted by", e);
    } catch (ExecutionException e) {
        LOG.error("Get bier node failed cause by", e);
    }
    LOG.error("Get bier node failed!");
    return null;
}