Example usage for twitter4j Query setLang

List of usage examples for twitter4j Query setLang

Introduction

On this page you can find example usages of twitter4j Query setLang.

Prototype

public void setLang(String lang) 

Document

Restricts tweets to the given language, specified by an ISO 639-1 code (http://en.wikipedia.org/wiki/ISO_639-1).
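
Before the collected examples below, here is a minimal, self-contained sketch of how setLang is typically combined with a search call. The search term "twitter4j" and the credentials loaded by TwitterFactory (from a twitter4j.properties file on the classpath) are assumptions for illustration only, not part of any example on this page.

import java.util.List;

import twitter4j.Query;
import twitter4j.QueryResult;
import twitter4j.Status;
import twitter4j.Twitter;
import twitter4j.TwitterException;
import twitter4j.TwitterFactory;

public class SetLangExample {
    public static void main(String[] args) throws TwitterException {
        // Credentials are assumed to come from twitter4j.properties on the classpath.
        Twitter twitter = new TwitterFactory().getInstance();

        Query query = new Query("twitter4j"); // hypothetical search term
        query.setLang("en");                  // ISO 639-1 code: restrict results to English
        query.setCount(10);                   // ask for up to 10 tweets

        QueryResult result = twitter.search(query);
        List<Status> tweets = result.getTweets();
        for (Status tweet : tweets) {
            System.out.println("@" + tweet.getUser().getScreenName() + ": " + tweet.getText());
        }
    }
}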

Usage

From source file:TwitterPull.java

public void retrieveTweets() {
    try {
        Query query = new Query(this.queryString);
        query.setLang("en");
        query.setCount(100);
        QueryResult result;
        int i = 0;
        do {
            result = twitter.search(query);
            List<Status> tweets = result.getTweets();
            //                int i = 0;
            for (Status tweet : tweets) {
                System.out.println(tweet.getText().replaceAll("\n", "").replaceAll("\r", ""));
                //                    appendTweetDocument(tweet.getText());
            }
            i++;
        } while ((query = result.nextQuery()) != null && i < 10);
        //            setTwitterFeed(tweets);

        //            System.exit(0);
    } catch (TwitterException te) {
        Logger.getLogger(TwitterPull.class.getName()).log(Level.SEVERE, null, te);
        System.out.println("Failed to search tweets: " + te.getMessage());
        System.exit(-1);
    }
}

From source file:ch.schrimpf.core.TwitterCrawler.java

License:Open Source License

/**
 * @param queryString describes keywords and filters
 * @return an initialized Query
 */
private Query initQuery(String queryString) {
    Query query = new Query(queryString);
    try {
        Properties prop = new Properties();
        prop.load(new FileInputStream("easyTwitterCrawler.properties"));
        query.setCount(Integer.parseInt(prop.getProperty("queryLimit")));
        query.setLocale(prop.getProperty("locale"));
        query.setLang(prop.getProperty("lang"));
        GeoLocation location = new GeoLocation(Double.parseDouble(prop.getProperty("latitude")),
                Double.parseDouble(prop.getProperty("longitude")));
        double radius = Double.parseDouble(prop.getProperty("radius"));
        query.setGeoCode(location, radius, Query.KILOMETERS);
    } catch (IOException e) {
        // Properties could not be loaded; fall back to defaults
        query.setCount(DEFAULT_QUERY_LIMIT);
        query.setLocale(DEFAULT_LOCALE);
        query.setLang(DEFAULT_LANG);
        query.setGeoCode(DEFAULT_GEO_LOCATION, DEFAULT_RADIUS, Query.KILOMETERS);
    }
    return query;
}

From source file:Classes.TwitterPull.java

public void retrieveTweets() throws TwitterException {
    Query query = new Query("\"" + this.queryString + "\"");
    query.setLang("en");
    query.setCount(100);
    QueryResult result;
    int i = 0;
    do {
        result = twitter.search(query);
        List<Status> tweets = result.getTweets();
        for (Status tweet : tweets) {
            String t = tweet.getText().replaceAll("\n", "").replaceAll("\r", "");
            //                appendTweetDocument(t);
            retrievedTweets.add(t);
        }
        i++;
    } while ((query = result.nextQuery()) != null && i < 50);

}

From source file:Collector.TweetCollector.java

public static List<Status> getTweets(final String q) {
    Timer timer = new Timer();
    TimerTask hourlyTask = new TimerTask() {

            @Override
        public void run() {

            long amountOfTweets = 0;

            try {

                long maxID = -1;

                Query query = new Query(q);
                //printTimeLine(query);
                Map<String, RateLimitStatus> rateLimitStatus = twitter.getRateLimitStatus("search");
                RateLimitStatus searchLimit = rateLimitStatus.get("/search/tweets");
                for (int batchNumber = 0; batchNumber < MAX_QUERIES; batchNumber++) {

                    System.out.printf("\n\n!!! batch %d\n\n", batchNumber);

                    if (searchLimit.getRemaining() == 0) {
                        // so as to not get blocked by twitter
                        Thread.sleep((searchLimit.getSecondsUntilReset() + 3) * 1000L);
                    }

                    query.setCount(TWEETS_PER_QUERY); // constant value of 100
                    query.setResultType(Query.ResultType.recent);
                    query.setLang("en"); // English-language tweets only

                    if (maxID != -1) {
                        query.setMaxId(maxID - 1); // page backwards past tweets we have already seen
                    }
                    QueryResult result = twitter.search(query);
                    if (result.getTweets().size() == 0) {
                        break;
                    }

                    for (Status s : result.getTweets()) {
                        amountOfTweets++;
                        if (maxID == -1 || s.getId() < maxID) {
                            maxID = s.getId();
                        }
                        storeTweet(s); // persist the tweet in the database

                        System.out.printf("At%s : %s\n", // debugging purposes
                                s.getCreatedAt().toString(), s.getText());
                        searchLimit = result.getRateLimitStatus(); // refresh the remaining-call count
                        System.out.printf("\n\nA total of %d tweets retrieved\n", amountOfTweets);

                    }

                }

            } catch (TwitterException te) {

                System.out.println("Error Code :" + te.getErrorCode());
                System.out.println("Exception Code " + te.getExceptionCode());
                System.out.println("Status Code " + te.getStatusCode());

                if (te.getStatusCode() == 401) {
                    System.out.println("Twitter Error :\nAuthentication "
                            + "credentials (https://dev.twitter.com/auth) "
                            + " are either missing of incorrect, " + "\nplease check consumer key /secret");
                }
            } catch (InterruptedException ex) {
                // restore the interrupt flag so callers can react to the interruption
                Thread.currentThread().interrupt();
            }

        }
    };

    // schedule the task to run starting now and then every hour...
    timer.schedule(hourlyTask, 0L, 1000L * 60 * 60);
    return statuses;

}

From source file:com.data.dataanalytics.twitter.TwitterFeed.java

public List<Tweet> getTweets(String search) {
    //   We're curious how many tweets, in total, we've retrieved.  Note that TWEETS_PER_QUERY is an upper limit,
    //   but Twitter can and often will return far fewer tweets

    twitter4j.Twitter twitter = getTwitter();

    /*   This variable is the key to our retrieving multiple blocks of tweets.  In each batch of tweets we retrieve,
       we use this variable to remember the LOWEST tweet ID.  Tweet IDs are (java) longs, and they are roughly
       sequential over time.  Without setting the MaxId in the query, Twitter will always retrieve the most
       recent tweets.  Thus, to retrieve a second (or third or ...) batch of Tweets, we need to set the Max Id
       in the query to be one less than the lowest Tweet ID we've seen already.  This allows us to page backwards
       through time to retrieve additional blocks of tweets*/
    long maxID = -1;

    try {
        //   There are limits on how fast you can make API calls to Twitter, and if you have hit your limit
        //   and continue to make calls Twitter will get annoyed with you.  I've found that going past your
        //   limits now and then doesn't seem to be problematic, but if you have a program that keeps banging
        //   the API when you're not allowed you will eventually get shut down.
        //
        //   Thus, the proper thing to do is always check your limits BEFORE making a call, and if you have
        //   hit your limits, sleep until you are allowed to make calls again.
        //
        //   Every time you call the Twitter API, it tells you how many calls you have left, so you don't have
        //   to ask about the next call.  But before the first call, we need to find out whether we're already
        //   at our limit.

        //   This returns all the various rate limits in effect for us with the Twitter API
        Map<String, RateLimitStatus> rateLimitStatus = twitter.getRateLimitStatus("search");

        //   This finds the rate limit specifically for doing the search API call we use in this program
        RateLimitStatus searchTweetsRateLimit = rateLimitStatus.get("/search/tweets");

        //   Always nice to see these things when debugging code...
        System.out.printf("You have %d calls remaining out of %d, Limit resets in %d seconds\n",
                searchTweetsRateLimit.getRemaining(), searchTweetsRateLimit.getLimit(),
                searchTweetsRateLimit.getSecondsUntilReset());

        //   This is the loop that retrieves multiple blocks of tweets from Twitter
        for (int queryNumber = 0; queryNumber < MAX_QUERIES; queryNumber++) {
            System.out.printf("\n\n!!! Starting loop %d\n\n", queryNumber);
            //   Delay
            if (searchTweetsRateLimit.getRemaining() == 0) {
                System.out.printf("!!! Sleeping for %d seconds due to rate limits\n",
                        searchTweetsRateLimit.getSecondsUntilReset());

                //   If you sleep exactly the number of seconds, you can make your query a bit too early
                //   and still get an error for exceeding rate limitations
                //
                //    Adding two seconds seems to do the trick. Sadly, even just adding one second still triggers a
                //   rate limit exception more often than not.  I have no idea why, and I know from a Comp Sci
                //   standpoint this is really bad, but just add in 2 seconds and go about your business.  Or else.
                Thread.sleep((searchTweetsRateLimit.getSecondsUntilReset() + 2) * 1000l);
            }

            Query q = new Query(search); // Search for tweets that contains this term
            q.setCount(TWEETS_PER_QUERY); // How many tweets, max, to retrieve
            //q.resultType("recent");                  // Get all tweets
            q.setLang("en"); // English language tweets, please

            //   If maxID is -1, then this is our first call and we do not want to tell Twitter what the maximum
            //   tweet id is we want to retrieve.  But if it is not -1, then it represents the lowest tweet ID
            //   we've seen, so we want to start at it-1 (if we start at maxID, we would see the lowest tweet
            //   a second time)...
            if (maxID != -1) {
                q.setMaxId(maxID - 1);
            }

            //   This actually does the search on Twitter and makes the call across the network
            QueryResult r = twitter.search(q);

            //   If there are NO tweets in the result set, it is Twitter's way of telling us that there are no
            //   more tweets to be retrieved.  Remember that Twitter's search index only contains about a week's
            //   worth of tweets, and uncommon search terms can run out of week before they run out of tweets
            if (r.getTweets().size() == 0) {
                break; // Nothing? We must be done
            }

            //   loop through all the tweets and process them. Need to save as CSV file for database
            for (Status s : r.getTweets()) { // Loop through all the tweets...
                //   Increment our count of tweets retrieved

                //   Keep track of the lowest tweet ID.  If you do not do this, you cannot retrieve multiple
                //   blocks of tweets...
                if (maxID == -1 || s.getId() < maxID) {
                    maxID = s.getId();
                }

                //   Do something with the tweet....
                ta.processTweets(s, new Date());

            }

            //   As part of what gets returned from Twitter when we make the search API call, we get an updated
            //   status on rate limits.  We save this now so at the top of the loop we can decide whether we need
            //   to sleep or not before making the next call.
            searchTweetsRateLimit = r.getRateLimitStatus();
        }

    } catch (Exception e) {
        //   Catch all -- you're going to read the stack trace and figure out what needs to be done to fix it
        System.out.println("That didn't work well...wonder why?");

        e.printStackTrace();

    }

    System.out.printf("\n\nA total of %d tweets retrieved\n", ta.getTotalTweets());
    System.out.println("The total amount of tweets in an hour " + ta.getTweetsInAnHour());
    ta.checkIfTrending(ta.getTweetsInAnHour(), ta.getTotalTweets());

    return ta.getTweetList();
}

From source file:com.javielinux.database.EntitySearch.java

License:Apache License

public Query getQuery(Context cnt) {
    String q = this.getString("words_and");

    if (!this.getString("words_or").equals("")) {
        q += Utils.getQuotedText(this.getString("words_or"), "OR ", false);
    }

    if (!this.getString("words_not").equals("")) {
        q += Utils.getQuotedText(this.getString("words_not"), "-", true);
    }

    if (!this.getString("from_user").equals("")) {
        q += " from:" + this.getString("from_user");
    }

    if (!this.getString("to_user").equals("")) {
        q += " to:" + this.getString("to_user");
    }

    if (!this.getString("source").equals("")) {
        q += " source:" + this.getString("source");
    }

    if (this.getInt("attitude") == 1)
        q += " :)";
    if (this.getInt("attitude") == 2)
        q += " :(";

    String modLinks = "filter:links";
    String websVideos = "twitvid OR youtube OR vimeo OR youtu.be";
    String webPhotos = "lightbox.com OR mytubo.net OR imgur.com OR instagr.am OR twitpic OR yfrog OR plixi OR twitgoo OR img.ly OR picplz OR lockerz";

    if (this.getInt("filter") == 1)
        q += " " + modLinks;
    if (this.getInt("filter") == 2)
        q += " " + webPhotos + " " + modLinks;
    if (this.getInt("filter") == 3)
        q += " " + websVideos + " " + modLinks;
    if (this.getInt("filter") == 4)
        q += " " + websVideos + " OR " + webPhotos + " " + modLinks;
    if (this.getInt("filter") == 5)
        q += " source:twitterfeed " + modLinks;
    if (this.getInt("filter") == 6)
        q += " ?";
    if (this.getInt("filter") == 7)
        q += " market.android.com OR androidzoom.com OR androlib.com OR appbrain.com OR bubiloop.com OR yaam.mobi OR slideme.org "
                + modLinks;

    Log.d(Utils.TAG, "Buscando: " + q);

    Query query = new Query(q);

    if (this.getInt("use_geo") == 1) {
        if (this.getInt("type_geo") == 0) { // coordenadas del mapa
            GeoLocation gl = new GeoLocation(this.getDouble("latitude"), this.getDouble("longitude"));
            String unit = Query.KILOMETERS;
            if (this.getInt("type_distance") == 0)
                unit = Query.MILES;
            query.setGeoCode(gl, this.getDouble("distance"), unit);
        }

        if (this.getInt("type_geo") == 1) { // coordenadas del gps
            Location loc = LocationUtils.getLastLocation(cnt);
            if (loc != null) {
                GeoLocation gl = new GeoLocation(loc.getLatitude(), loc.getLongitude());
                String unit = Query.KILOMETERS;
                if (this.getInt("type_distance") == 0)
                    unit = Query.MILES;
                query.setGeoCode(gl, this.getDouble("distance"), unit);
            } else {
                mErrorLastQuery = cnt.getString(R.string.no_location);
            }
        }
    }

    PreferenceManager.setDefaultValues(cnt, R.xml.preferences, false);
    SharedPreferences preference = PreferenceManager.getDefaultSharedPreferences(cnt);

    int count = Integer.parseInt(preference.getString("prf_n_max_download", "60"));
    if (count <= 0)
        count = 60;

    query.setCount(count);

    String lang = "";
    if (!this.getString("lang").equals(""))
        lang = this.getString("lang");
    if (!lang.equals("all"))
        query.setLang(lang);

    // determine the point from which the query should start

    if (getInt("notifications") == 1) {
        String where = "search_id = " + this.getId() + " AND favorite = 0";
        int nResult = DataFramework.getInstance().getEntityListCount("tweets", where);
        if (nResult > 0) {
            long mLastIdNotification = DataFramework.getInstance().getTopEntity("tweets", where, "date desc")
                    .getLong("tweet_id");
            query.setSinceId(mLastIdNotification);
        }
    }

    //query.setResultType(Query.POPULAR);

    return query;
}

From source file:com.mycompany.omnomtweets.TweetsAboutCandidates.java

/**
 * Searches for tweets using the given query string.
 * @param str the query to use
 * @return List of the tweet statuses that match the query.
 */
public List<Status> search(String str, long maxId) {
    List<Status> tweets = null;
    try {
        Query query = new Query(str);
        query.setCount(100);
        //English only.
        query.setLang("en");
        QueryResult result;
        query.setMaxId(maxId);
        result = twitter.search(query);
        tweets = result.getTweets();
    } catch (TwitterException te) {
        System.out.println("Failed to search tweets: " + te.getMessage());
    }
    return tweets;
}

From source file:com.schnee.tweetgeister.visualization.TweetgeisterBalloonLayout.java

License:Open Source License

/**
 *
 */
private void createTree() {

    try {
        Twitter twitter = new TwitterFactory().getInstance();

        List<Tweet> allTweets = new ArrayList<Tweet>();
        String qString = "bp";
        Query query = new Query(qString);
        query.setRpp(100);
        query.setLang("en");

        int hits = 100;
        int page = 1;
        while (allTweets.size() < 1400 && page < 16 && hits == 100) {

            query.setPage(page);

            // System.out.println(query.toString());
            QueryResult result = twitter.search(query);
            List<Tweet> tweets = result.getTweets();
            allTweets.addAll(tweets);
            hits = tweets.size();
            System.out.println("page: " + page + " hits: " + hits + " all tweets:" + allTweets.size());
            page++;
        }
        System.out.println("hits: " + allTweets.size());

        Set<CharSequence> inputSet = new HashSet<CharSequence>();

        for (Tweet tweet : allTweets) {
            inputSet.add(tweet.getText());
        }

        Clusterer cl = new Clusterer(inputSet, TokenizedCharSequence.TOKENIZER_FACTORY);

        com.schnee.tweetgeister.data.Tree<CharSequence> mindmap = cl.buildTree();

        fillTree(mindmap);

    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

}

From source file:dk.netarkivet.harvester.tools.TwitterDecidingScope.java

License:Open Source License

/**
 * This routine makes any necessary Twitter API calls and queues the content discovered.
 *
 * @param controller The controller for this crawl.
 */
@Override
public void initialize(CrawlController controller) {
    super.initialize(controller);
    twitter = (new TwitterFactory()).getInstance();
    keywords = null;
    try {
        keywords = (StringList) super.getAttribute(ATTR_KEYWORDS);
        pages = ((Integer) super.getAttribute(ATTR_PAGES)).intValue();
        geoLocations = (StringList) super.getAttribute(ATTR_GEOLOCATIONS);
        language = (String) super.getAttribute(ATTR_LANG);
        if (language == null) {
            language = "all";
        }
        resultsPerPage = (Integer) super.getAttribute(ATTR_RESULTS_PER_PAGE);
        queueLinks = (Boolean) super.getAttribute(ATTR_QUEUE_LINKS);
        queueUserStatus = (Boolean) super.getAttribute(ATTR_QUEUE_USER_STATUS);
        queueUserStatusLinks = (Boolean) super.getAttribute(ATTR_QUEUE_USER_STATUS_LINKS);
        queueKeywordLinks = (Boolean) super.getAttribute(ATTR_QUEUE_KEYWORD_LINKS);
    } catch (AttributeNotFoundException e1) {
        e1.printStackTrace();
        throw new RuntimeException(e1);
    } catch (MBeanException e1) {
        e1.printStackTrace();
        throw new RuntimeException(e1);
    } catch (ReflectionException e1) {
        e1.printStackTrace();
        throw new RuntimeException(e1);
    }
    for (Object keyword : keywords) {
        log.info("Twitter Scope keyword: {}", keyword);
    }
    // If keywords or geoLocations is missing, add a list with a single empty string so that the main loop is
    // executed at least once.
    if (keywords == null || keywords.isEmpty()) {
        keywords = new StringList("keywords", "empty keyword list", new String[] { "" });
    }
    if (geoLocations == null || geoLocations.isEmpty()) {
        geoLocations = new StringList("geolocations", "empty geolocation list", new String[] { "" });
    }
    log.info("Twitter Scope will queue {} page(s) of results.", pages);
    // Nested loop over keywords, geo_locations and pages.
    for (Object keyword : keywords) {
        String keywordString = (String) keyword;
        for (Object geoLocation : geoLocations) {
            String urlQuery = (String) keyword;
            Query query = new Query();
            query.setRpp(resultsPerPage);
            if (language != null && !language.equals("")) {
                query.setLang(language);
                urlQuery += " lang:" + language;
                keywordString += " lang:" + language;
            }
            urlQuery = "http://twitter.com/search/" + URLEncoder.encode(urlQuery);
            if (queueKeywordLinks) {
                addSeedIfLegal(urlQuery);
            }
            for (int page = 1; page <= pages; page++) {
                query.setPage(page);
                if (!keyword.equals("")) {
                    query.setQuery(keywordString);
                }
                if (!geoLocation.equals("")) {
                    String[] locationArray = ((String) geoLocation).split(",");
                    try {
                        GeoLocation location = new GeoLocation(Double.parseDouble(locationArray[0]),
                                Double.parseDouble(locationArray[1]));
                        query.setGeoCode(location, Double.parseDouble(locationArray[2]), locationArray[3]);
                    } catch (NumberFormatException e) {
                        e.printStackTrace();
                    }
                }
                try {
                    final QueryResult result = twitter.search(query);
                    List<Tweet> tweets = result.getTweets();
                    for (Tweet tweet : tweets) {
                        long id = tweet.getId();
                        String fromUser = tweet.getFromUser();
                        String tweetUrl = "http://www.twitter.com/" + fromUser + "/status/" + id;
                        addSeedIfLegal(tweetUrl);
                        tweetCount++;
                        if (queueLinks) {
                            extractEmbeddedLinks(tweet);
                        }
                        if (queueUserStatus) {
                            String statusUrl = "http://twitter.com/" + tweet.getFromUser() + "/";
                            addSeedIfLegal(statusUrl);
                            linkCount++;
                            if (queueUserStatusLinks) {
                                queueUserStatusLinks(tweet.getFromUser());
                            }
                        }
                    }
                } catch (TwitterException e1) {
                    log.error(e1.getMessage());
                }
            }
        }

    }
    System.out.println(
            TwitterDecidingScope.class + " added " + tweetCount + " tweets and " + linkCount + " other links.");
}

From source file:dk.netarkivet.harvester.tools.TwitterDecidingScope.java

License:Open Source License

/**
 * Searches for a given user's recent tweets and queues any embedded material found.
 *
 * @param user The twitter username (without the @ prefix).
 */
private void queueUserStatusLinks(String user) {
    Query query = new Query();
    query.setQuery("@" + user);
    query.setRpp(20);
    if (!language.equals("")) {
        query.setLang(language);
    }
    try {
        List<Tweet> results = twitter.search(query).getTweets();
        if (results != null && !results.isEmpty()) {
            System.out.println("Extracting embedded links for user " + user);
        }
        for (Tweet result : results) {
            if (result.getIsoLanguageCode().equals(language) || language.equals("")) {
                extractEmbeddedLinks(result);
            }
        }
    } catch (TwitterException e) {
        e.printStackTrace();
    }
}