Example usage for java.util.concurrent FutureTask get

List of usage examples for java.util.concurrent FutureTask get

Introduction

On this page you can find example usage for java.util.concurrent FutureTask.get(long timeout, TimeUnit unit).

Prototype

public V get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException 
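
The examples below all follow the same basic pattern: wrap a Callable in a FutureTask, hand the task to an executor (or a plain Thread), and call get with a timeout so the caller does not block indefinitely; on timeout the task is typically cancelled. The following minimal sketch illustrates that pattern; the executor, timeout value, and class name are illustrative assumptions, not taken from any example on this page.

import java.util.concurrent.*;

public class FutureTaskGetTimeoutSketch {
    public static void main(String[] args) throws Exception {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        // Wrap the work in a FutureTask so it can be both executed and queried.
        FutureTask<String> task = new FutureTask<>(() -> {
            Thread.sleep(500); // simulate slow work
            return "done";
        });
        executor.execute(task);
        try {
            // Wait at most one second for the result.
            System.out.println(task.get(1, TimeUnit.SECONDS));
        } catch (TimeoutException e) {
            // The task did not finish in time; try to interrupt it.
            task.cancel(true);
        } finally {
            executor.shutdown();
        }
    }
}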

Usage

From source file:bi.meteorite.util.ITestBootstrap.java

protected String executeCommand(final String command, final Long timeout, final Boolean silent) {
    String response;
    final ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    final PrintStream printStream = new PrintStream(byteArrayOutputStream);
    final CommandProcessor commandProcessor = getOsgiService(CommandProcessor.class);
    final CommandSession commandSession = commandProcessor.createSession(System.in, printStream, System.err);
    FutureTask<String> commandFuture = new FutureTask<String>(new Callable<String>() {
        public String call() {
            try {
                if (!silent) {
                    System.err.println(command);
                }
                commandSession.execute(command);
            } catch (Exception e) {
                e.printStackTrace(System.err);
            }
            printStream.flush();
            return byteArrayOutputStream.toString();
        }
    });

    try {
        executor.submit(commandFuture);
        response = commandFuture.get(timeout, TimeUnit.MILLISECONDS);
    } catch (Exception e) {
        e.printStackTrace(System.err);
        response = "SHELL COMMAND TIMED OUT: ";
    }

    return response;
}

From source file:com.appleframework.monitor.action.LogsAction.java

@RequestMapping(value = "/projects/{projectName}/logs/more", method = RequestMethod.GET)
public void console(final HttpServletResponse response, ModelMap map, @PathVariable String projectName,
        LogQuery logQuery) throws IOException, ParseException {
    Project project = projectService.findProject(projectName);
    map.put("project", project);
    final MongoConverter converter = project.fetchMongoTemplate().getConverter();
    final DBCursor cursor = logsService.findLogs(projectName, logQuery);
    final StringBuffer buf = new StringBuffer();

    FutureTask<String> task = new FutureTask<String>(new Callable<String>() {
        @Override
        public String call() throws Exception {
            long startTime = System.currentTimeMillis();
            // stop reading once mongWaitSeconds has elapsed
            logger.debug("result:");
            while (cursor.hasNext()) {
                Log log = converter.read(Log.class, cursor.next());

                buf.insert(0, log.toString() + "\n");
                long current = System.currentTimeMillis();
                if ((current - startTime) / 1000 >= mongWaitSeconds)
                    break;
            }
            return buf.toString();
        }
    });
    executor.execute(task);
    try {
        task.get(mongWaitSeconds + 5, TimeUnit.SECONDS);
        cursor.close();
    } catch (Exception e) {
        logger.error("time out ", e);
        task.cancel(true);
    }

    response.setContentType("text/html;charset=UTF-8");
    response.getWriter().write(buf.toString());
    response.getWriter().flush();
}

From source file:org.apache.accumulo.miniclusterImpl.MiniAccumuloClusterImpl.java

int stopProcessWithTimeout(final Process proc, long timeout, TimeUnit unit)
        throws InterruptedException, ExecutionException, TimeoutException {
    FutureTask<Integer> future = new FutureTask<>(() -> {
        proc.destroy();
        return proc.waitFor();
    });

    executor.execute(future);

    return future.get(timeout, unit);
}

From source file:com.skymobi.monitor.action.LogsAction.java

@RequestMapping(value = "/projects/{projectName}/logs/more", method = RequestMethod.GET)
public void console(final HttpServletResponse response, ModelMap map, @PathVariable String projectName,
        LogQuery logQuery) throws IOException, ParseException {
    Project project = projectService.findProject(projectName);
    map.put("project", project);
    final MongoConverter converter = project.fetchMongoTemplate().getConverter();
    final DBCursor cursor = logsService.findLogs(projectName, logQuery);
    final StringBuffer buf = new StringBuffer();
    @SuppressWarnings("unchecked")
    FutureTask<String> task = new FutureTask(new Callable<String>() {
        @Override
        public String call() throws Exception {
            long startTime = System.currentTimeMillis();
            // stop reading once mongWaitSeconds has elapsed
            logger.debug("result:");
            while (cursor.hasNext()) {
                Log log = converter.read(Log.class, cursor.next());

                buf.insert(0, log.toString() + "\n");
                long current = System.currentTimeMillis();
                if ((current - startTime) / 1000 >= mongWaitSeconds)
                    break;
            }
            return buf.toString();
        }
    });
    executor.execute(task);
    try {
        task.get(mongWaitSeconds + 5, TimeUnit.SECONDS);
        cursor.close();
    } catch (Exception e) {
        logger.error("time out ", e);
        task.cancel(true);
    }

    response.setContentType("text/html;charset=UTF-8");
    response.getWriter().write(buf.toString());
    response.getWriter().flush();

}

From source file:com.eclectide.intellij.whatthecommit.WhatTheCommitAction.java

public String loadCommitMessage(final String url) {
    final FutureTask<String> downloadTask = new FutureTask<String>(new Callable<String>() {
        public String call() {
            final HttpClient client = new HttpClient();
            final GetMethod getMethod = new GetMethod(url);
            try {
                final int statusCode = client.executeMethod(getMethod);
                if (statusCode != HttpStatus.SC_OK)
                    throw new RuntimeException("Connection error (HTTP status = " + statusCode + ")");
                return getMethod.getResponseBodyAsString();
            } catch (IOException e) {
                throw new RuntimeException(e.getMessage(), e);
            }
        }
    });

    ApplicationManager.getApplication().executeOnPooledThread(downloadTask);

    try {
        return downloadTask.get(TIMEOUT_SECONDS, TimeUnit.SECONDS);
    } catch (TimeoutException e) {
        // ignore
    } catch (Exception e) {
        throw new RuntimeException(e.getMessage(), e);
    }

    if (!downloadTask.isDone()) {
        downloadTask.cancel(true);
        throw new RuntimeException("Connection timed out");
    }

    return "";
}

From source file:org.opencms.workflow.CmsDefaultWorkflowManager.java

/**
 * The implementation of the "publish" workflow action.<p>
 *
 * @param userCms the user CMS context
 * @param options the publish options
 * @param resources the resources which the action should process
 *
 * @return the workflow response
 * @throws CmsException if something goes wrong
 */
protected CmsWorkflowResponse actionPublish(CmsObject userCms, CmsPublishOptions options,
        final List<CmsResource> resources) throws CmsException {

    final CmsPublish publish = new CmsPublish(userCms, options);
    // use FutureTask to get the broken links, because we can then use a different thread if it takes too long
    final FutureTask<List<CmsPublishResource>> brokenResourcesGetter = new FutureTask<List<CmsPublishResource>>(
            new Callable<List<CmsPublishResource>>() {

                public List<CmsPublishResource> call() throws Exception {

                    return publish.getBrokenResources(resources);
                }
            });

    Thread brokenResourcesThread = new Thread(brokenResourcesGetter);
    brokenResourcesThread.start();
    try {
        List<CmsPublishResource> brokenResources = brokenResourcesGetter.get(10, TimeUnit.SECONDS);
        if (brokenResources.size() == 0) {
            publish.publishResources(resources);
            CmsWorkflowResponse response = new CmsWorkflowResponse(true, "",
                    new ArrayList<CmsPublishResource>(), new ArrayList<CmsWorkflowAction>(), null);
            return response;
        } else {
            String brokenResourcesLabel = getLabel(userCms, Messages.GUI_BROKEN_LINKS_0);
            boolean canForcePublish = OpenCms.getWorkplaceManager().getDefaultUserSettings()
                    .isAllowBrokenRelations() || OpenCms.getRoleManager().hasRole(userCms, CmsRole.VFS_MANAGER);
            List<CmsWorkflowAction> actions = new ArrayList<CmsWorkflowAction>();
            if (canForcePublish) {
                String forceLabel = getLabel(userCms, Messages.GUI_WORKFLOW_ACTION_FORCE_PUBLISH_0);
                actions.add(new CmsWorkflowAction(ACTION_FORCE_PUBLISH, forceLabel, true, true));
            }
            CmsWorkflowResponse response = new CmsWorkflowResponse(false, brokenResourcesLabel, brokenResources,
                    actions, null);
            return response;
        }
    } catch (TimeoutException e) {
        // Things are taking too long, do them in a different thread and just return "OK" to the client
        Thread thread = new Thread() {

            @SuppressWarnings("synthetic-access")
            @Override
            public void run() {

                LOG.info(
                        "Checking broken relations is taking too long, using a different thread for checking and publishing now.");
                try {
                    // Make sure the computation is finished by calling get() without a timeout parameter
                    // We don't need the actual result of the get(), though; we just get the set of resource paths from the validator object
                    brokenResourcesGetter.get();
                    List<CmsResource> resourcesToPublish = new ArrayList<CmsResource>(resources);
                    Iterator<CmsResource> resIter = resourcesToPublish.iterator();
                    while (resIter.hasNext()) {
                        CmsResource currentRes = resIter.next();
                        if (publish.getRelationValidator().keySet().contains(currentRes.getRootPath())) {
                            resIter.remove();
                            LOG.info("Excluding resource from publish list because relations would be broken: "
                                    + currentRes.getRootPath());
                        }
                    }
                    publish.publishResources(resourcesToPublish);
                } catch (Exception ex) {
                    LOG.error(ex.getLocalizedMessage(), ex);
                }
            }
        };
        thread.start();
        CmsWorkflowResponse response = new CmsWorkflowResponse(true, "", new ArrayList<CmsPublishResource>(),
                new ArrayList<CmsWorkflowAction>(), null);
        return response;
    } catch (InterruptedException e) {
        // shouldn't happen; log exception
        LOG.error(e.getLocalizedMessage());
        return null;
    } catch (ExecutionException e) {
        // shouldn't happen; log exception
        LOG.error(e.getLocalizedMessage());
        return null;
    }
}

From source file:com.impetus.ankush2.hadoop.utils.HadoopUtils.java

/**
 * Gets the json object using callable.
 * 
 * @param url
 *            the url
 * @return the json object using callable
 * @throws AnkushException
 *             the ankush exception
 */
public static JSONObject getJsonObjectUsingCallable(String url) throws AnkushException {
    String errMsg = "Could not get JSON object for URL-" + url + ".";
    try {
        long waitTime = AppStoreWrapper.getAnkushConfReader().getLongValue("hadoop.jmxmonitoring.wait.time");
        CallableRestJsonData callableRestJsonData = new CallableRestJsonData(url);

        FutureTask<JSONObject> futureTaskJmxBeanData = new FutureTask<JSONObject>(callableRestJsonData);

        AppStoreWrapper.getExecutor().execute(futureTaskJmxBeanData);

        JSONObject beanObject = futureTaskJmxBeanData.get(waitTime, TimeUnit.MILLISECONDS);
        if (beanObject == null) {
            throw new AnkushException(errMsg);
        }
        return beanObject;
    } catch (AnkushException e) {
        throw e;
    } catch (Exception e) {
        LOG.error(errMsg, Constant.Component.Name.HADOOP, e);
        throw new AnkushException(errMsg);
    }
}

From source file:uk.bl.dpt.qa.ProcessIsolatedTika.java

/**
 * Parse an inputstream and populate a Metadata object
 * @param pInputStream stream to analyse 
 * @param pMetadata metadata object to populate
 * @param pOutputStream output to write data to
 * @return true if processed ok, false if execution was terminated
 */
public boolean parse(final InputStream pInputStream, final Metadata pMetadata) {

    boolean ret = true;

    if (!gRunner.isRunning()) {
        gLogger.error("Tika-Server is not running");
        return false;
    }

    final String TIKA_PATH = "/meta";
    final String END_POINT = "http://" + TIKA_LOCAL_HOST + ":" + TIKA_SERVER_PORT;

    gLogger.trace("Server: " + END_POINT + TIKA_PATH);

    final String detectedType = pMetadata.get(Metadata.CONTENT_TYPE);

    FutureTask<Integer> task = new FutureTask<Integer>(new Callable<Integer>() {
        @Override
        public Integer call() throws Exception {

            gResponse = WebClient.create(END_POINT + TIKA_PATH).accept("text/csv")
                    // give the parsers a hint
                    .type(detectedType)
                    // protect the stream from being closed
                    .put(new CloseShieldInputStream(pInputStream));

            return null;
        }
    });

    Thread thread = new Thread(task);
    thread.start();

    try {
        task.get(TIMEOUT_SECS * 1000, TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
        gLogger.info("InterruptedException: " + e);
        ret = false;
        restart();
    } catch (ExecutionException e) {
        gLogger.info("ExecutionException: " + e);
        ret = false;
        restart();
    } catch (TimeoutException e) {
        gLogger.info("TimeoutException: " + e);
        ret = false;
        restart();
    }

    if (gResponse != null) {
        if (gResponse.getStatus() == Status.UNSUPPORTED_MEDIA_TYPE.getStatusCode()) {
            // the server may return HTTP 415 (unsupported) if it won't accept the mimetype
            // handle this issue here
            // add some text to the output
            // FIXME: maybe change mimetype for a more visible error?
            pMetadata.add("parseFailure415", "true");
            gLogger.error("Parse Failure: HTTP 415 (format unsupported for parsing)");
        } else {
            if (gResponse.getEntity() instanceof InputStream) {
                InputStream is = (InputStream) gResponse.getEntity();
                BufferedReader reader = new BufferedReader(new InputStreamReader(is));
                try {
                    Iterable<CSVRecord> records = CSVFormat.DEFAULT.parse(reader);
                    for (CSVRecord record : records) {
                        pMetadata.add(record.get(0), record.get(1));
                    }
                } catch (IOException e1) {
                    // TODO Auto-generated catch block
                    e1.printStackTrace();
                    ret = false;
                } finally {
                    if (reader != null) {
                        try {
                            reader.close();
                        } catch (IOException e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                    }
                }
            }
        }
    }

    gLogger.info("Metadata entries: " + pMetadata.names().length);

    return ret;
}

From source file:com.impetus.ankush2.hadoop.utils.HadoopUtils.java

/**
 * Gets the jmx bean using callable.
 * 
 * @param host
 *            the host
 * @param clientPort
 *            the client port
 * @param beanName
 *            the bean name
 * @return the jmx bean using callable
 * @throws AnkushException
 *             the ankush exception
 */
public static Map<String, Object> getJmxBeanUsingCallable(String host, String clientPort, String beanName)
        throws AnkushException {
    String errMsg = "Could not get JMX bean data for host-" + host + ", port-" + clientPort + ".";
    try {
        long waitTime = AppStoreWrapper.getAnkushConfReader().getLongValue("hadoop.jmxmonitoring.wait.time");
        CallableJmxBeanData callableJmxBeanData = new CallableJmxBeanData(host, clientPort, beanName);
        FutureTask<Map<String, Object>> futureTaskJmxBeanData = new FutureTask<Map<String, Object>>(
                callableJmxBeanData);

        AppStoreWrapper.getExecutor().execute(futureTaskJmxBeanData);

        Map<String, Object> beanObject = futureTaskJmxBeanData.get(waitTime, TimeUnit.MILLISECONDS);
        if (beanObject == null) {
            throw new AnkushException(errMsg);
        }
        return beanObject;
    } catch (AnkushException e) {
        throw e;
    } catch (Exception e) {
        e.printStackTrace();
        LOG.error(errMsg, Constant.Component.Name.HADOOP, host, e);
        throw new AnkushException(errMsg);
    }
}

From source file:org.sakaiproject.tool.impl.SessionComponentRegressionTest.java

/**
 * Ensures {@link Session} has entity semantics, i.e. the same
 * object is returned to each request for that object. "Always"
 * here is limited to non-expired sessions.
 *
 * @throws TimeoutException 
 * @throws ExecutionException 
 * @throws InterruptedException 
 */
public void testGetSessionAlwaysReturnsSessionCreatedByStartSession()
        throws InterruptedException, ExecutionException, TimeoutException {
    final Session startedSession = startSessionForUser();
    assertSame(startedSession, sessionComponent.getSession(startedSession.getId()));
    assertSame(startedSession, sessionComponent.getSession(startedSession.getId())); // intentional duplicate
    // all threads should get the same Session obj for a given key
    FutureTask<Session> asynchGet = new FutureTask<Session>(new Callable<Session>() {
        public Session call() {
            return sessionComponent.getSession(startedSession.getId());
        }
    });
    new Thread(asynchGet).start();
    assertSame(startedSession, asynchGet.get(1, TimeUnit.SECONDS));
}