Example usage for org.apache.commons.lang.text StrTokenizer setDelimiterString

List of usage examples for org.apache.commons.lang.text StrTokenizer setDelimiterString

Introduction

On this page you can find an example usage for org.apache.commons.lang.text StrTokenizer setDelimiterString.

Prototype

public StrTokenizer setDelimiterString(String delim) 

Source Link

Document

Sets the field delimiter string.

Usage

From source file:com.softlayer.objectstorage.Account.java

/**
 * Retrieves every container that belongs to this account.
 *
 * @return the containers on this account, one per line of the API response
 * @throws IOException if the response body cannot be read
 */
public List<Container> listAllContainers() throws IOException {
    Hashtable<String, String> params = super.createAuthParams();
    ClientResource client = super.get(params, super.storageurl);
    Representation entity = client.getResponseEntity();
    String body = entity.getText();

    // The service returns one container name per line; split on newlines.
    StrTokenizer lineTokenizer = new StrTokenizer(body);
    lineTokenizer.setDelimiterString("\n");

    ArrayList<Container> result = new ArrayList<Container>();
    for (String containerName : lineTokenizer.getTokenArray()) {
        result.add(new Container(containerName, super.baseurl, this.username, this.password, false));
    }
    return result;
}

From source file:com.softlayer.objectstorage.Account.java

/**
 * Retrieves all CDN-enabled containers for this account.
 *
 * @return the CDN-enabled containers, one per line of the API response
 * @throws IOException if the response body cannot be read
 */
public List<Container> listAllCDNContainers() throws IOException {
    Hashtable<String, String> params = super.createAuthParams();
    // Restrict the listing to CDN-enabled containers.
    params.put(X_CONTENT, "cdn");
    ClientResource client = super.get(params, super.storageurl);
    Representation entity = client.getResponseEntity();
    String body = entity.getText();

    // The service returns one container name per line; split on newlines.
    StrTokenizer lineTokenizer = new StrTokenizer(body);
    lineTokenizer.setDelimiterString("\n");

    ArrayList<Container> result = new ArrayList<Container>();
    for (String containerName : lineTokenizer.getTokenArray()) {
        result.add(new Container(containerName, super.baseurl, this.username, this.password, false));
    }
    return result;
}

From source file:com.softlayer.objectstorage.Container.java

/**
 * Populates this container's object list from the REST API.
 *
 * @throws EncoderException if the container name cannot be URL-encoded
 * @throws IOException if the response body cannot be read
 */
private void loadData() throws EncoderException, IOException {
    Hashtable<String, String> params = super.createAuthParams();
    String encodedName = super.saferUrlEncode(this.name);
    ClientResource client = super.get(params, super.storageurl + "/" + encodedName);
    Representation entity = client.getResponseEntity();
    String body = entity.getText();

    // The service returns one object name per line; split on newlines.
    StrTokenizer lineTokenizer = new StrTokenizer(body);
    lineTokenizer.setDelimiterString("\n");

    this.objs = new ArrayList<ObjectFile>();
    for (String objectName : lineTokenizer.getTokenArray()) {
        this.objs.add(new ObjectFile(objectName, this.name, this.baseUrl, this.username, this.password, false));
    }
}

From source file:it.drwolf.ridire.util.async.FrequencyListGenerator.java

/**
 * Builds a raw frequency table over the "performaFL" field of the documents
 * matching the given corpora and metadata filters.
 *
 * <p>Each indexed term is split with {@code ContextAnalyzer.SEPARATOR} and is
 * expected to have exactly three parts (assumed to be forma / PoS / lemma —
 * TODO confirm against the analyzer); {@code frequencyBy} selects which
 * combination of the parts becomes the key of the returned map.
 *
 * @param corporaNames corpora to restrict the count to; a null/empty list
 *            (or a single null entry) means no corpus restriction
 * @param functionalMetadatumDescription optional functional-metadatum filter
 * @param semanticMetadatumDescription optional semantic-metadatum filter
 * @param frequencyBy one of "forma", "lemma", "PoS-lemma", "PoS-forma";
 *            any other value keys by the PoS part only
 * @return map from the selected key to its summed term frequency
 * @throws IOException if the Lucene index cannot be read
 */
private Map<String, Integer> getBareTable(List<String> corporaNames, String functionalMetadatumDescription,
        String semanticMetadatumDescription, String frequencyBy) throws IOException {
    Map<String, Integer> fl = new HashMap<String, Integer>();
    Query q = new BooleanQuery();
    // Restrict to the requested corpora (OR-ed together), unless the list is
    // effectively empty.
    if (corporaNames != null && corporaNames.size() > 0
            && !(corporaNames.size() == 1 && corporaNames.get(0) == null)) {
        BooleanQuery corporaQuery = new BooleanQuery();
        for (String cn : corporaNames) {
            if (cn != null) {
                corporaQuery.add(new TermQuery(new Term("corpus", cn)), Occur.SHOULD);
            }
        }
        ((BooleanQuery) q).add(corporaQuery, Occur.MUST);
    }
    if (functionalMetadatumDescription != null) {
        TermQuery funcQuery = new TermQuery(new Term("functionalMetadatum", functionalMetadatumDescription));
        ((BooleanQuery) q).add(funcQuery, Occur.MUST);
    }
    if (semanticMetadatumDescription != null) {
        TermQuery semaQuery = new TermQuery(new Term("semanticMetadatum", semanticMetadatumDescription));
        ((BooleanQuery) q).add(semaQuery, Occur.MUST);
    }
    // Match-anything prefix on "performaFL": requires the document to have
    // that field at all.
    PrefixQuery prefixQuery = new PrefixQuery(new Term("performaFL", ""));
    ((BooleanQuery) q).add(prefixQuery, Occur.MUST);
    IndexSearcher indexSearcher = this.contextsIndexManager.getIndexSearcherR();
    System.out.println("Starting FL calculation");
    // First pass: just count the matches so progress can be reported.
    TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
    indexSearcher.search(q, null, totalHitCountCollector);
    int totalHits = totalHitCountCollector.getTotalHits();
    System.out.println("Frequency list calculation. Docs to be processed: " + totalHits);
    ScoreDoc after = null;
    int docsProcessed = 0;
    // Second pass: page through the hits BATCH_SIZE docs at a time, using
    // searchAfter-style pagination anchored on the last ScoreDoc seen.
    for (int j = 0; j < totalHits; j += FrequencyListGenerator.BATCH_SIZE) {
        TopDocs topDocs = null;
        if (after == null) {
            topDocs = indexSearcher.search(q, FrequencyListGenerator.BATCH_SIZE);
        } else {
            topDocs = indexSearcher.searchAfter(after, q, FrequencyListGenerator.BATCH_SIZE);
        }
        StrTokenizer strTokenizer = new StrTokenizer();
        strTokenizer.setDelimiterString(ContextAnalyzer.SEPARATOR);
        ScoreDoc[] scoreDocs = topDocs.scoreDocs;
        if (scoreDocs != null) {
            for (ScoreDoc scoreDoc : scoreDocs) {
                ++docsProcessed;
                after = scoreDoc;
                TermFreqVector termFreqVector = indexSearcher.getIndexReader().getTermFreqVector(scoreDoc.doc,
                        "performaFL");
                // Documents without a term vector for the field are skipped.
                if (termFreqVector == null) {
                    continue;
                }
                String[] terms = termFreqVector.getTerms();
                int[] frequencies = termFreqVector.getTermFrequencies();
                for (int i = 0; i < terms.length; i++) {
                    String term = terms[i];
                    // reset() reuses the tokenizer for each term string.
                    String[] tokenArray = strTokenizer.reset(term).getTokenArray();
                    // Only well-formed three-part tokens are counted.
                    if (tokenArray.length != 3) {
                        continue;
                    }
                    String pos = tokenArray[1];
                    String lemma = tokenArray[2];
                    // Fall back to the surface form when the tagger could not
                    // lemmatize the token.
                    if (lemma.equals("<unknown>")) {
                        lemma = tokenArray[0];
                    }
                    if (frequencyBy.equals("forma")) {
                        term = tokenArray[0];
                    } else if (frequencyBy.equals("lemma")) {
                        term = lemma;
                    } else if (frequencyBy.equals("PoS-lemma")) {
                        // Collapse all verb subtags into a single "VER" class.
                        if (pos.startsWith("VER")) {
                            pos = "VER";
                        }
                        term = pos + " / " + lemma;
                    } else if (frequencyBy.equals("PoS-forma")) {
                        if (pos.startsWith("VER")) {
                            pos = "VER";
                        }
                        term = pos + " / " + tokenArray[0];
                    } else {
                        // Default: key by the PoS part alone.
                        term = tokenArray[1];
                    }
                    Integer count = fl.get(term);
                    if (count == null) {
                        fl.put(term, frequencies[i]);
                    } else {
                        fl.put(term, frequencies[i] + count);
                    }
                }
                // Progress report every 1000 documents.
                if (docsProcessed % 1000 == 0) {
                    System.out.println("Frequency list calculation. Docs processed: " + docsProcessed
                            + " on total: " + totalHits + " (" + docsProcessed * 100.0f / totalHits + "%)");
                }
            }
        }
    }
    return fl;
}

From source file:com.rackspacecloud.client.cloudfiles.FilesClient.java

/**
 * Lists the containers on the account, optionally limited in number and
 * starting after a marker container name.
 *
 * @param limit  maximum number of containers to return; ignored when &lt;= 0
 * @param marker return only containers whose names sort after this value;
 *               ignored when null
 * @return the matching containers; an empty list when the account has none
 * @throws IOException on a network/IO failure
 * @throws HttpException on an HTTP protocol failure
 * @throws FilesException on an unexpected server response
 * @throws FilesAuthorizationException when not logged in, or re-login after
 *         an expired token fails
 * @throws FilesNotFoundException when the server reports the account missing
 */
public List<FilesContainer> listContainers(int limit, String marker)
        throws IOException, HttpException, FilesException {
    if (!this.isLoggedin()) {
        throw new FilesAuthorizationException("You must be logged in", null, null);
    }
    HttpGet method = null;
    try {
        LinkedList<NameValuePair> parameters = new LinkedList<NameValuePair>();

        if (limit > 0) {
            parameters.add(new BasicNameValuePair("limit", String.valueOf(limit)));
        }
        if (marker != null) {
            parameters.add(new BasicNameValuePair("marker", marker));
        }

        String uri = parameters.size() > 0 ? makeURI(storageURL, parameters) : storageURL;
        method = new HttpGet(uri);
        method.getParams().setIntParameter("http.socket.timeout", connectionTimeOut);
        method.setHeader(FilesConstants.X_AUTH_TOKEN, authToken);
        FilesResponse response = new FilesResponse(client.execute(method));

        // An expired auth token yields 401: re-login once and retry the request.
        if (response.getStatusCode() == HttpStatus.SC_UNAUTHORIZED) {
            method.abort();
            if (login()) {
                method = new HttpGet(uri);
                method.getParams().setIntParameter("http.socket.timeout", connectionTimeOut);
                method.setHeader(FilesConstants.X_AUTH_TOKEN, authToken);
                response = new FilesResponse(client.execute(method));
            } else {
                throw new FilesAuthorizationException("Re-login failed", response.getResponseHeaders(),
                        response.getStatusLine());
            }
        }

        if (response.getStatusCode() == HttpStatus.SC_OK) {
            // The body is one container name per line.
            StrTokenizer tokenize = new StrTokenizer(response.getResponseBodyAsString());
            tokenize.setDelimiterString("\n");
            String[] containers = tokenize.getTokenArray();
            ArrayList<FilesContainer> containerList = new ArrayList<FilesContainer>();
            for (String container : containers) {
                containerList.add(new FilesContainer(container, this));
            }
            return containerList;
        } else if (response.getStatusCode() == HttpStatus.SC_NO_CONTENT) {
            return new ArrayList<FilesContainer>();
        } else if (response.getStatusCode() == HttpStatus.SC_NOT_FOUND) {
            throw new FilesNotFoundException("Account was not found", response.getResponseHeaders(),
                    response.getStatusLine());
        } else {
            throw new FilesException("Unexpected response from server", response.getResponseHeaders(),
                    response.getStatusLine());
        }
    } catch (FilesException fex) {
        // Fix: the broad catch below used to rewrap our own deliberately
        // thrown FilesNotFoundException/FilesException into a generic
        // FilesException, hiding the exception type from callers.
        throw fex;
    } catch (IOException ioe) {
        throw ioe;
    } catch (Exception ex) {
        // Wrap anything else (e.g. response-parsing failures) with context,
        // preserving the cause instead of dumping a stack trace to stderr.
        throw new FilesException("Unexpected error, probably parsing Server XML", ex);
    } finally {
        if (method != null)
            method.abort();
    }
}

From source file:it.drwolf.ridire.index.sketch.AsyncSketchCreator.java

/**
 * Computes the sketch results for a single lemma by running the sketch's
 * gramrel queries through CQP and post-processing the resulting table files.
 *
 * <p>The lemma's overall frequency is first looked up in a per-metadatum
 * native frequency table; when it is zero or absent, no queries are run and
 * an empty map is returned.
 *
 * @param lemma the lemma to build sketches for
 * @param functionalMetadatum optional functional metadatum restricting the
 *            frequency table used (overridden by semanticMetadatum when both
 *            are non-null)
 * @param semanticMetadatum optional semantic metadatum restricting the
 *            frequency table used
 * @param s the sketch whose gramrels are executed
 * @param indexWriter writer used when indexing trinary-sketch results
 * @return per-collocate sketch results; populated only for non-trinary
 *         sketches (trinary results go through the index writer instead —
 *         NOTE(review): inferred from the two process* branches, confirm)
 */
private HashMap<String, SketchResult> extractSingleLemmaSketches(String lemma, String functionalMetadatum,
        String semanticMetadatum, Sketch s, IndexWriter indexWriter) {
    HashMap<String, SketchResult> sr = new HashMap<String, SketchResult>();
    // Pick the frequency table for the active metadatum; the semantic one
    // wins when both are supplied.
    String freqTable = "freq_lemma_all";
    if (functionalMetadatum != null) {
        freqTable = "freq_lemma_" + functionalMetadatum.trim().replaceAll("\\s", "_");
    }
    if (semanticMetadatum != null) {
        freqTable = "freq_lemma_" + semanticMetadatum.trim().replaceAll("\\s", "_");
    }
    List<Number> firstFreqList = this.entityManager
            .createNativeQuery("select freq from " + freqTable + " where item=:item")
            .setParameter("item", lemma).getResultList();
    // Only proceed when the lemma actually occurs in the selected table.
    if (firstFreqList != null && firstFreqList.size() > 0 && firstFreqList.get(0).longValue() > 0) {
        long firstFreq = firstFreqList.get(0).longValue();
        StrTokenizer strTokenizer = new StrTokenizer();
        try {
            List<File> tableFiles = new ArrayList<File>();
            String queryString = null;
            String stringToAdd = null;
            String realQuery = "";
            // Run each gramrel of the sketch as a separate CQP query, each
            // writing its results to its own temporary .tbl file.
            for (GramRel gramRel : s.getGramrels()) {
                File resTblFile = File.createTempFile("ridireTBL", ".tbl");
                tableFiles.add(resTblFile);
                String rel = gramRel.getRel();
                // The gramrel is a format template with the lemma substituted in.
                realQuery = String.format(rel, lemma);
                String subquery = gramRel.getSubquery();
                if (subquery != null) {
                    realQuery += ";\nASUB;\n" + String.format(subquery, lemma);
                }
                queryString = this.createQueryForCQP(resTblFile, stringToAdd, functionalMetadatum,
                        semanticMetadatum, realQuery, s.isTrinary());
                File queryFile = File.createTempFile("ridireQ", ".query");
                FileUtils.writeStringToFile(queryFile, queryString);
                long start = System.currentTimeMillis();
                this.executeCQPQuery(queryFile, gramRel.isInverse());
                System.out.println(
                        "CQP exec time for " + realQuery.replaceAll("\n", " ") + " " + functionalMetadatum + " "
                                + semanticMetadatum + " : " + (System.currentTimeMillis() - start));
                // If CQP produced no readable table, skip cleanup of the query
                // file for this gramrel (the .tbl file stays in tableFiles).
                if (!resTblFile.exists() || !resTblFile.canRead()) {
                    continue;
                }
                FileUtils.deleteQuietly(queryFile);
            }
            List<String> lines = null;
            if (!s.isTrinary()) {
                // Non-trinary: merge all gramrel tables into a single file
                // before reading.
                File resTblFile = File.createTempFile("ridireTBLFINAL", ".tbl");
                this.compactLines(tableFiles, resTblFile);
                lines = FileUtils.readLines(resTblFile);
                FileUtils.deleteQuietly(resTblFile);
            } else if (tableFiles.size() > 0) {
                // Trinary: only the first table is consumed.
                lines = FileUtils.readLines(tableFiles.get(0));
            }
            for (File tableFile : tableFiles) {
                FileUtils.deleteQuietly(tableFile);
            }
            if (s.isTrinary()) {
                // Trinary tables use "@@##" as the column separator.
                strTokenizer.setDelimiterString("@@##");
                this.processTrinaryTable(freqTable, firstFreq, strTokenizer, lines, lemma, indexWriter,
                        s.getName(), functionalMetadatum, semanticMetadatum, s.getGoodFor());
            } else {
                // Non-trinary tables are space-separated.
                strTokenizer.setDelimiterString(" ");
                this.processNotTrinaryTable(sr, freqTable, firstFreq, strTokenizer, lines);
            }
        } catch (IOException e) {
            // Best-effort: log the failure and return whatever was collected.
            e.printStackTrace();
        }
    }
    return sr;
}