List of usage examples for com.google.common.util.concurrent MoreExecutors sameThreadExecutor
@Deprecated @GwtIncompatible("TODO") public static ListeningExecutorService sameThreadExecutor()
From source file:org.robotninjas.concurrent.FluentFutures.java
/** * * @param input1/*www . ja va 2 s . co m*/ * @param input2 * @param combine * @param <X> * @param <Y> * @param <Z> * @return */ public static <X, Y, Z> FluentFuture<Z> combine(ListenableFuture<X> input1, ListenableFuture<Y> input2, Combine2<X, Y, Z> combine) { return combine(input1, input2, combine, MoreExecutors.sameThreadExecutor()); }
From source file:com.palantir.giraffe.command.CommandExitLatch.java
/** * Register a new CommandFuture on the Listener. * @param future the CommandFuture to register * @throws IllegalStateException if the Listener has already been started. *//* ww w. j a v a2 s . c o m*/ public void register(final CommandFuture future) { synchronized (lock) { if (started) { throw new IllegalStateException( "Cannot register new commands after " + "ExecutionSystemShutdownListener has started"); } futures.add(future); future.addListener(new Runnable() { @Override public void run() { finish(future); } }, MoreExecutors.sameThreadExecutor()); } }
From source file:org.opendaylight.controller.md.sal.binding.test.DataBrokerTestCustomizer.java
public DOMStore createConfigurationDatastore() { final InMemoryDOMDataStore store = new InMemoryDOMDataStore("CFG", MoreExecutors.sameThreadExecutor()); schemaService.registerSchemaContextListener(store); return store; }
From source file:com.helion3.prism.api.query.QueryBuilder.java
/** * Builds a {@link Query} by parsing an array of arguments. * * @param parameters String[] Parameter:value list * @return {@link Query} Database query object *///from w w w . j a va 2 s. c o m public static CompletableFuture<Query> fromArguments(QuerySession session, @Nullable String[] arguments) throws ParameterException { checkNotNull(session); Query query = new Query(); CompletableFuture<Query> future = new CompletableFuture<Query>(); // Track all parameter pairs Map<String, String> definedParameters = new HashMap<String, String>(); if (arguments.length > 0) { List<ListenableFuture<?>> futures = new ArrayList<ListenableFuture<?>>(); for (String arg : arguments) { Optional<ListenableFuture<?>> listenable; if (flagPattern.matcher(arg).matches()) { listenable = parseFlagFromArgument(session, query, arg); } else { // Get alias/value pair Pair<String, String> pair = getParameterKeyValue(arg); // Parse for handler listenable = parseParameterFromArgument(session, query, pair); // Add to list of defined definedParameters.put(pair.getKey(), pair.getValue()); } if (listenable.isPresent()) { futures.add(listenable.get()); } } if (!futures.isEmpty()) { ListenableFuture<List<Object>> combinedFuture = Futures.allAsList(futures); combinedFuture.addListener(new Runnable() { @Override public void run() { future.complete(query); } }, MoreExecutors.sameThreadExecutor()); } else { future.complete(query); } } else { future.complete(query); } if (Prism.getConfig().getNode("defaults", "enabled").getBoolean()) { // Require any parameter defaults String defaultsUsed = ""; for (ParameterHandler handler : Prism.getParameterHandlers()) { boolean aliasFound = false; for (String alias : handler.getAliases()) { if (definedParameters.containsKey(alias)) { aliasFound = true; break; } } if (!aliasFound) { Optional<Pair<String, String>> pair = handler.processDefault(session, query); if (pair.isPresent()) { defaultsUsed += pair.get().getKey() + ":" + pair.get().getValue() + " "; } } } // @todo should move this if (!defaultsUsed.isEmpty()) { session.getCommandSource().get().sendMessage( Format.subduedHeading(Text.of(String.format("Defaults used: %s", defaultsUsed)))); } } return future; }
From source file:info.archinnov.achilles.internal.async.AsyncUtils.java
public void maybeAddAsyncListeners(ListenableFuture<?> listenableFuture, FutureCallback<Object>[] asyncListeners) { maybeAddAsyncListeners(listenableFuture, asyncListeners, MoreExecutors.sameThreadExecutor()); }
From source file:org.elasticsearch.threadpool.transport.TransportThreadPool.java
public TransportThreadPool(Settings settings) { this.settings = settings; Map<String, Settings> groupSettings = settings.getGroups("threadpool"); Map<String, ExecutorHolder> executors = Maps.newHashMap(); executors.put(Names.GENERIC, build(Names.GENERIC, "cached", groupSettings.get(Names.GENERIC), settingsBuilder().put("keep_alive", "30s").build())); executors.put(org.elasticsearch.threadpool.ThreadPool.Names.INDEX, build(org.elasticsearch.threadpool.ThreadPool.Names.INDEX, "cached", groupSettings.get(org.elasticsearch.threadpool.ThreadPool.Names.INDEX), ImmutableSettings.Builder.EMPTY_SETTINGS)); executors.put(org.elasticsearch.threadpool.ThreadPool.Names.BULK, build(org.elasticsearch.threadpool.ThreadPool.Names.BULK, "cached", groupSettings.get(org.elasticsearch.threadpool.ThreadPool.Names.BULK), ImmutableSettings.Builder.EMPTY_SETTINGS)); executors.put(Names.SAME, new ExecutorHolder(MoreExecutors.sameThreadExecutor(), new TransportThreadPoolInfoElement(Names.SAME, "same"))); this.executors = ImmutableMap.copyOf(executors); this.scheduler = (ScheduledThreadPoolExecutor) Executors.newScheduledThreadPool(1, ClientEsExecutors.daemonThreadFactory(settings, "scheduler")); this.scheduler.setExecuteExistingDelayedTasksAfterShutdownPolicy(false); this.scheduler.setContinueExistingPeriodicTasksAfterShutdownPolicy(false); //TimeValue estimatedTimeInterval = componentSettings.getAsTime("estimated_time_interval", TimeValue.timeValueMillis(200)); TimeValue estimatedTimeInterval = TimeValue.timeValueMillis(200); this.estimatedTimeThread = new EstimatedTimeThread(ClientEsExecutors.threadName(settings, "[timer]"), estimatedTimeInterval.millis()); this.estimatedTimeThread.start(); }
From source file:org.trinity.shellplugin.wm.x11.impl.protocol.icccm.AbstractCachedProtocol.java
public ListenableFuture<Void> addProtocolListener(@Nonnull final DisplaySurface xWindow, @Nonnull final ProtocolListener<P> listener, @Nonnull final ExecutorService executor) { return this.displayExecutor.submit(new Callable<Void>() { @Override/*ww w. j av a2s . c o m*/ public Void call() throws Exception { AsyncListenable listeners = AbstractCachedProtocol.this.listenersByWindow.get(xWindow); if (listeners == null) { listeners = new AsyncListenableEventBus(MoreExecutors.sameThreadExecutor()); listenersByWindow.put(xWindow, listeners); } listeners.register(listener, executor); return null; } }); }
From source file:com.google.idea.blaze.base.experiments.WebExperimentSyncer.java
/** * Fetch and process the experiments on the current thread. *//*from ww w.j a v a2s .c o m*/ private void initialize() { ListenableFuture<String> response = MoreExecutors.sameThreadExecutor() .submit(new WebExperimentsDownloader()); response.addListener(new WebExperimentsResultProcessor(response, false), MoreExecutors.sameThreadExecutor()); // Failed to fetch, try to load cache from disk if (experimentValues == null) { experimentValues = loadCache(); } // There must have been an error retrieving the experiments. if (experimentValues == null) { experimentValues = ImmutableMap.of(); } }
From source file:com.joyveb.dbpimpl.cass.prepare.schema.AbstractQueryOperation.java
protected Executor getExecutor() { return executor != null ? executor : MoreExecutors.sameThreadExecutor(); }
From source file:uk.ac.susx.tag.method51.core.organise.JobRunner.java
private void runJob() throws Exception { manager.addListener(new Manager.Listener() { @Override/*from w ww . jav a 2s. c o m*/ public void healthy(@Nonnull Manager source) { jobStatus = "healthy"; } @Override public void stopped(@Nonnull Manager source) { jobStatus = "stopped"; } @Override public void failure(@Nonnull Manager source, @Nonnull Agent agent, @Nonnull Throwable cause) { jobStatus = "failed"; } }, MoreExecutors.sameThreadExecutor()); // which are made here for (Component c : job.components.get()) { Agent a = (Agent) c.makeComponent(); LOG.info("Instantiated component: {}", a.getClass().getSimpleName()); instances.add(a); manager.add(a); } Map<Pair<Producer, Consumer>, Params> labelledEdges = new HashMap<>(); // now we hook them all together for (Job.Edge e : job.edges.get()) { try { Producer producer = (Producer) instances.get(e.from.get()); Consumer consumer = (Consumer) instances.get(e.to.get()); LOG.info("Creating edge: {}[{}] -> {}[{}]", producer.getClass().getSimpleName(), e.fromLabel.get(), consumer.getClass().getSimpleName(), e.toLabel.get()); if (!e.toLabel.get().equals(Component.DEFAULT_INPUT_LABEL)) { Labeler.Options opts = Params.instance(Labeler.Options.class); opts.setLabel(e.toLabel.get()); Labeler labeler = new Labeler(opts); labeler.addOutput(consumer); consumer = labeler; } if (e.fromLabel.get().equals(Component.DEFAULT_OUTPUT_LABEL) || consumer.getClass().getAnnotation(LabelledConsumer.class) != null) { producer.addOutput(consumer); } else { Pair<Producer, Consumer> pc = new ImmutablePair<>(producer, consumer); //labelled edges from the same producer to the same consumer must pass through the same filter instance //in order to keep synchronised data if (labelledEdges.containsKey(pc)) { if (producer instanceof BatchProducer) { ((BatchLabelWhitelistFilter.Options) labelledEdges.get(pc)) .addAcceptedLabel(e.fromLabel.get()); } else { ((LabelWhitelistFilter.Options) labelledEdges.get(pc)) .addAcceptedLabel(e.fromLabel.get()); } } else { if (producer instanceof BatchProducer) { BatchUnlabeler.Options unlabellerOptions = Params .instance(BatchUnlabeler.Options.class); unlabellerOptions.setParent(job); BatchUnlabeler unlabeler = new BatchUnlabeler(unlabellerOptions); BatchLabelWhitelistFilter.Options whiteListOptions = Params .instance(BatchLabelWhitelistFilter.Options.class); whiteListOptions.setParent(job); whiteListOptions.addAcceptedLabel(e.fromLabel.get()); BatchLabelWhitelistFilter whitelistFilter = new BatchLabelWhitelistFilter( whiteListOptions); producer.addOutput(whitelistFilter); whitelistFilter.addOutput(unlabeler); unlabeler.addOutput(consumer); manager.add(whitelistFilter); manager.add(unlabeler); labelledEdges.put(new ImmutablePair<>(producer, consumer), whiteListOptions); } else { Unlabeler.Options unlabellerOptions = Params.instance(Unlabeler.Options.class); unlabellerOptions.setParent(job); Unlabeler unlabeler = new Unlabeler(unlabellerOptions); LabelWhitelistFilter.Options whiteListOptions = Params .instance(LabelWhitelistFilter.Options.class); whiteListOptions.setParent(job); whiteListOptions.addAcceptedLabel(e.fromLabel.get()); LabelWhitelistFilter whitelistFilter = new LabelWhitelistFilter(whiteListOptions); producer.addOutput(whitelistFilter); whitelistFilter.addOutput(unlabeler); unlabeler.addOutput(consumer); manager.add(whitelistFilter); manager.add(unlabeler); labelledEdges.put(new ImmutablePair<>(producer, consumer), whiteListOptions); } } } } catch (ClassCastException e1) { throw new RuntimeException("Invalid edge from " + e.from + " to " + e.to, e1); } } if (job.logProfiles.get()) { Profiler profiler = Profiler.newInstance(manager, 1, TimeUnit.SECONDS, new SLF4JLoggingProfilerListener(LOG, LogLevel.INFO, ProfileFormat.JSON)); profiler.startAsync(); } // maybe this job needs a server running too. Server server = startServer(); manager.startAsync(); manager.awaitStopped(); // goodbye world if (server != null) { server.stop(); } }