Example usage for org.apache.commons.lang.text StrTokenizer getCSVInstance

List of usage examples for org.apache.commons.lang.text StrTokenizer getCSVInstance

Introduction

In this page you can find the example usage for org.apache.commons.lang.text StrTokenizer getCSVInstance.

Prototype

public static StrTokenizer getCSVInstance(char[] input) 

Source Link

Document

Gets a new tokenizer instance which parses Comma Separated Value strings, initializing it with the given input.

Usage

From source file:com.hmsinc.epicenter.model.geography.util.GeocoderDotUSClient.java

/**
 * Geocodes a US street address via the geocoder.us service and returns the
 * matching point geometry, or {@code null} if the response could not be parsed.
 *
 * @param address street address (never null)
 * @param city    city name (never null)
 * @param state   state code (never null)
 * @param zipcode ZIP code (never null)
 * @return a point Geometry in (longitude, latitude) order, or null when the
 *         service response is absent or not the expected 5-field CSV record
 * @throws RuntimeException wrapping any HTTP or I/O failure
 */
public Geometry geocode(String address, String city, String state, String zipcode) {

    Validate.notNull(address);
    Validate.notNull(city);
    Validate.notNull(state);
    Validate.notNull(zipcode);

    Geometry g = null;

    final GetMethod get = new GetMethod(geoCoderURL);

    try {

        final NameValuePair[] query = { new NameValuePair("address", address), new NameValuePair("city", city),
                new NameValuePair("state", state), new NameValuePair("zipcode", zipcode) };

        get.setQueryString(query);
        httpClient.executeMethod(get);

        final String response = get.getResponseBodyAsString();

        if (response != null) {
            // geocoder.us returns one CSV record: lat, lon, street, city/state, zip
            final StrTokenizer tokenizer = StrTokenizer.getCSVInstance(response);
            if (tokenizer.size() == 5) {

                final Double latitude = Double.valueOf(tokenizer.nextToken());
                final Double longitude = Double.valueOf(tokenizer.nextToken());

                // JTS Coordinate takes (x, y) = (longitude, latitude)
                g = factory.createPoint(new Coordinate(longitude, latitude));
                logger.debug("Geometry: " + g.toString());
            }
        }

    } catch (HttpException e) {
        throw new RuntimeException(e);
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        // Always release the connection, even when the request or body read
        // throws; the original leaked the connection on any exception.
        get.releaseConnection();
    }

    return g;
}

From source file:de.tudarmstadt.ukp.dkpro.tc.mallet.report.MalletBatchCrossValidationReport.java

/**
 * Aggregates per-fold cross-validation results from all CV subtasks into a
 * single overview table, averaging numeric measures (mean +/- stddev) or
 * summing them when listed in {@code nonAveragedResultsMeasures}, and writes
 * the table in Excel and CSV form.
 *
 * @throws Exception propagated from storage / I/O operations
 */
@Override
public void execute() throws Exception {
    StorageService store = getContext().getStorageService();

    FlexTable<String> table = FlexTable.forClass(String.class);

    Map<String, List<Double>> key2resultValues = new HashMap<String, List<Double>>();

    for (TaskContextMetadata subcontext : getSubtasks()) {
        String name = BatchTask.class.getSimpleName() + "CrossValidation";
        // one CV batch (which internally ran numFolds times)
        if (subcontext.getLabel().startsWith(name)) {
            Map<String, String> discriminatorsMap = store
                    .retrieveBinary(subcontext.getId(), Task.DISCRIMINATORS_KEY, new PropertiesAdapter())
                    .getMap();

            File eval = store.getStorageFolder(subcontext.getId(), EVAL_FILE_NAME + SUFFIX_CSV);

            Map<String, String> resultMap = new HashMap<String, String>();

            // Read the eval file once (the original re-read it inside the loop)
            List<String> evalLines = FileUtils.readLines(eval);

            String[][] evalMatrix = null;

            int i = 0;
            for (String line : evalLines) {
                String[] tokenizedLine = StrTokenizer.getCSVInstance(line).getTokenArray();
                if (evalMatrix == null) {
                    evalMatrix = new String[evalLines.size()][tokenizedLine.length];
                }
                evalMatrix[i] = tokenizedLine;
                i++;
            }

            if (evalMatrix == null) {
                // empty eval file; previously this caused an NPE at evalMatrix[0]
                continue;
            }

            // columns
            for (int j = 0; j < evalMatrix[0].length; j++) {
                String header = evalMatrix[0][j];
                String[] vals = new String[evalMatrix.length - 1];
                // rows
                for (int k = 1; k < evalMatrix.length; k++) {
                    if (evalMatrix[k][j].equals("null")) {
                        vals[k - 1] = String.valueOf(0.);
                    } else {
                        vals[k - 1] = evalMatrix[k][j];
                    }
                }
                Mean mean = new Mean();
                Sum sum = new Sum();
                StandardDeviation std = new StandardDeviation();

                // Try to interpret the column as numeric; fall back to strings.
                // The original toggled dVals/sVals to null and threw an NPE on
                // any column mixing numeric and non-numeric values.
                boolean allNumeric = true;
                double[] dVals = new double[vals.length];
                Set<String> sVals = new HashSet<String>();
                for (int k = 0; k < vals.length; k++) {
                    sVals.add(vals[k]);
                    if (allNumeric) {
                        try {
                            dVals[k] = Double.parseDouble(vals[k]);
                        } catch (NumberFormatException e) {
                            allNumeric = false;
                        }
                    }
                }

                if (allNumeric) {
                    if (nonAveragedResultsMeasures.contains(header)) {
                        resultMap.put(header, String.valueOf(sum.evaluate(dVals)));
                    } else {
                        // mean +/- standard deviation across folds
                        resultMap.put(header, String.valueOf(mean.evaluate(dVals)) + "\u00B1"
                                + String.valueOf(std.evaluate(dVals)));
                    }
                } else {
                    if (sVals.size() > 1) {
                        resultMap.put(header, "---");
                    } else {
                        resultMap.put(header, vals[0]);
                    }
                }
            }

            String key = getKey(discriminatorsMap);

            // NOTE(review): results is registered but never populated here —
            // looks like dead bookkeeping carried over; preserved as-is.
            List<Double> results;
            if (key2resultValues.get(key) == null) {
                results = new ArrayList<Double>();
            } else {
                results = key2resultValues.get(key);
            }
            key2resultValues.put(key, results);

            Map<String, String> values = new HashMap<String, String>();
            Map<String, String> cleanedDiscriminatorsMap = new HashMap<String, String>();

            // drop discriminators the report is configured to hide
            for (String disc : discriminatorsMap.keySet()) {
                if (!ReportUtils.containsExcludePattern(disc, discriminatorsToExclude)) {
                    cleanedDiscriminatorsMap.put(disc, discriminatorsMap.get(disc));
                }
            }
            values.putAll(cleanedDiscriminatorsMap);
            values.putAll(resultMap);

            table.addRow(subcontext.getLabel(), values);
        }
    }

    getContext().getLoggingService().message(getContextLabel(), ReportUtils.getPerformanceOverview(table));

    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_CSV, table.getCsvWriter());

    table.setCompact(false);
    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_CSV, table.getCsvWriter());

    // output the location of the batch evaluation folder
    // otherwise it might be hard for novice users to locate this
    File dummyFolder = store.getStorageFolder(getContext().getId(), "dummy");
    // TODO can we also do this without creating and deleting the dummy folder?
    getContext().getLoggingService().message(getContextLabel(),
            "Storing detailed results in:\n" + dummyFolder.getParent() + "\n");
    dummyFolder.delete();
}

From source file:de.tudarmstadt.ukp.dkpro.tc.crfsuite.CRFSuiteBatchCrossValidationReport.java

/**
 * Aggregates per-fold cross-validation results of every
 * {@code ExperimentCrossValidation} subtask into one overview table, averaging
 * numeric measures (mean +/- stddev) or summing those listed in
 * {@code nonAveragedResultsMeasures}, and writes the table as Excel and CSV.
 *
 * @throws Exception propagated from storage / I/O operations
 */
@Override
public void execute() throws Exception {
    StorageService store = getContext().getStorageService();

    FlexTable<String> table = FlexTable.forClass(String.class);

    Map<String, List<Double>> key2resultValues = new HashMap<String, List<Double>>();

    for (TaskContextMetadata subcontext : getSubtasks()) {
        String name = ExperimentCrossValidation.class.getSimpleName();
        // one CV batch (which internally ran numFolds times)
        if (subcontext.getLabel().startsWith(name)) {
            Map<String, String> discriminatorsMap = store
                    .retrieveBinary(subcontext.getId(), Task.DISCRIMINATORS_KEY, new PropertiesAdapter())
                    .getMap();

            File eval = store.getStorageFolder(subcontext.getId(), EVAL_FILE_NAME + SUFFIX_CSV);

            Map<String, String> resultMap = new HashMap<String, String>();

            // Read the eval file once (the original re-read it inside the loop)
            List<String> evalLines = FileUtils.readLines(eval);

            String[][] evalMatrix = null;

            int i = 0;
            for (String line : evalLines) {
                String[] tokenizedLine = StrTokenizer.getCSVInstance(line).getTokenArray();
                if (evalMatrix == null) {
                    evalMatrix = new String[evalLines.size()][tokenizedLine.length];
                }
                evalMatrix[i] = tokenizedLine;
                i++;
            }

            if (evalMatrix == null) {
                // empty eval file; previously this caused an NPE at evalMatrix[0]
                continue;
            }

            // columns
            for (int j = 0; j < evalMatrix[0].length; j++) {
                String header = evalMatrix[0][j];
                String[] vals = new String[evalMatrix.length - 1];
                // rows
                for (int k = 1; k < evalMatrix.length; k++) {
                    if (evalMatrix[k][j].equals("null")) {
                        vals[k - 1] = String.valueOf(0.);
                    } else {
                        vals[k - 1] = evalMatrix[k][j];
                    }
                }
                Mean mean = new Mean();
                Sum sum = new Sum();
                StandardDeviation std = new StandardDeviation();

                // Try to interpret the column as numeric; fall back to strings.
                // The original toggled dVals/sVals to null and threw an NPE on
                // any column mixing numeric and non-numeric values.
                boolean allNumeric = true;
                double[] dVals = new double[vals.length];
                Set<String> sVals = new HashSet<String>();
                for (int k = 0; k < vals.length; k++) {
                    sVals.add(vals[k]);
                    if (allNumeric) {
                        try {
                            dVals[k] = Double.parseDouble(vals[k]);
                        } catch (NumberFormatException e) {
                            allNumeric = false;
                        }
                    }
                }

                if (allNumeric) {
                    if (nonAveragedResultsMeasures.contains(header)) {
                        resultMap.put(header + foldSum, String.valueOf(sum.evaluate(dVals)));
                    } else {
                        // mean +/- standard deviation across folds; the
                        // original wrapped an already-String concatenation in
                        // a redundant String.valueOf(...)
                        resultMap.put(header + foldAveraged,
                                mean.evaluate(dVals) + "\u00B1" + String.valueOf(std.evaluate(dVals)));
                    }
                } else {
                    if (sVals.size() > 1) {
                        resultMap.put(header, "---");
                    } else {
                        resultMap.put(header, vals[0]);
                    }
                }
            }

            String key = getKey(discriminatorsMap);

            // NOTE(review): results is registered but never populated here —
            // looks like dead bookkeeping carried over; preserved as-is.
            List<Double> results;
            if (key2resultValues.get(key) == null) {
                results = new ArrayList<Double>();
            } else {
                results = key2resultValues.get(key);
            }
            key2resultValues.put(key, results);

            Map<String, String> values = new HashMap<String, String>();
            Map<String, String> cleanedDiscriminatorsMap = new HashMap<String, String>();

            // drop discriminators the report is configured to hide
            for (String disc : discriminatorsMap.keySet()) {
                if (!ReportUtils.containsExcludePattern(disc, discriminatorsToExclude)) {
                    cleanedDiscriminatorsMap.put(disc, discriminatorsMap.get(disc));
                }
            }
            values.putAll(cleanedDiscriminatorsMap);
            values.putAll(resultMap);

            table.addRow(subcontext.getLabel(), values);
        }
    }

    getContext().getLoggingService().message(getContextLabel(), ReportUtils.getPerformanceOverview(table));
    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_CSV, table.getCsvWriter());

    table.setCompact(false);
    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_CSV, table.getCsvWriter());

    // output the location of the batch evaluation folder
    // otherwise it might be hard for novice users to locate this
    File dummyFolder = store.getStorageFolder(getContext().getId(), "dummy");
    // TODO can we also do this without creating and deleting the dummy folder?
    getContext().getLoggingService().message(getContextLabel(),
            "Storing detailed results in:\n" + dummyFolder.getParent() + "\n");
    dummyFolder.delete();
}

From source file:com.manydesigns.elements.util.Util.java

/**
 * Splits a CSV-formatted string into its individual field values.
 *
 * @param text the comma-separated input to split
 * @return the fields of {@code text} as an array
 */
public static String[] matchStringArray(String text) {
    return StrTokenizer.getCSVInstance(text).getTokenArray();
}

From source file:de.tudarmstadt.ukp.dkpro.tc.core.util.ReportUtils.java

/**
 * Adds results from a serialized matrix to a map
 * //from w w w .  j av  a 2  s.c  o  m
 * @param aggregateMap
 * @param matrix
 *            a csv matrix with the class names in the first row and first column
 * @return updated map
 * @throws IOException
 */
/**
 * Adds results from a serialized confusion matrix to an aggregate map.
 *
 * @param aggregateMap map from [predicted, actual] label pairs to accumulated counts
 * @param matrix
 *            a csv matrix with the class names in the first row and first column
 * @return the updated map (same instance as {@code aggregateMap})
 * @throws IOException if the matrix file cannot be read
 */
public static Map<List<String>, Double> updateAggregateMatrix(Map<List<String>, Double> aggregateMap,
        File matrix) throws IOException {
    List<String> confMatrixLines = FileUtils.readLines(matrix);
    StrTokenizer l = StrTokenizer.getCSVInstance(confMatrixLines.get(0));
    l.setDelimiterChar(',');
    String[] headline = l.getTokenArray();

    for (int i = 1; i < confMatrixLines.size(); i++) {
        // Tokenize each row exactly once; the original re-tokenized the same
        // line (and called getTokenArray() twice) for every column.
        StrTokenizer line = StrTokenizer.getCSVInstance(confMatrixLines.get(i));
        line.setDelimiterChar(',');
        String[] cells = line.getTokenArray();
        String act = cells[0]; // first column holds the actual class label

        for (int j = 1; j < headline.length; j++) {
            String pred = headline[j];
            double value = Double.valueOf(cells[j]);

            List<String> key = new ArrayList<String>(Arrays.asList(new String[] { pred, act }));

            // accumulate counts for this (predicted, actual) pair
            if (aggregateMap.get(key) != null) {
                aggregateMap.put(key, aggregateMap.get(key) + value);
            } else {
                aggregateMap.put(key, value);
            }
        }
    }
    return aggregateMap;
}

From source file:com.savy3.util.DBConfiguration.java

/**
 * Converts a String back to connection parameters.
 * @param input String from configuration
 * @return JDBC connection parameters// ww  w  .j  av  a 2  s . c om
 */
/**
 * Converts a String back to connection parameters.
 *
 * @param input comma-separated {@code key=value} pairs from configuration
 * @return JDBC connection parameters, or {@code null} when the input is
 *         null or empty
 */
protected static Properties propertiesFromString(String input) {
    if (input == null || input.isEmpty()) {
        return null;
    }
    Properties result = new Properties();
    // Outer tokenizer splits the CSV list; inner tokenizer splits key=value.
    StrTokenizer propertyTokenizer = StrTokenizer.getCSVInstance(input);
    StrTokenizer valueTokenizer = StrTokenizer.getCSVInstance();
    valueTokenizer.setDelimiterChar('=');
    while (propertyTokenizer.hasNext()) {
        valueTokenizer.reset(propertyTokenizer.nextToken());
        String[] pair = valueTokenizer.getTokenArray();
        // silently skip malformed entries that are not exactly key=value
        if (pair.length == 2) {
            result.put(pair[0], pair[1]);
        }
    }
    return result;
}

From source file:org.kuali.kfs.module.tem.service.impl.TravelerServiceImpl.java

@Override
public boolean isEmployee(final TravelerDetail traveler) {
    final String param = getParameterService()
            .getParameterValueAsString(TemParameterConstants.TEM_DOCUMENT.class, EMPLOYEE_TRAVELER_TYPE_CODES);
    List<String> employeeTypes = StrTokenizer.getCSVInstance(param).getTokenList();

    return employeeTypes.contains(StringUtils.defaultString(traveler.getTravelerTypeCode()));
}

From source file:org.owasp.esapi.reference.DefaultSecurityConfiguration.java

/**
 * Load configuration. Never prints properties.
 * /*from   w  w w.  j av a 2  s .  c  o m*/
 * @throws java.io.IOException
 *             if the file is inaccessible
 */
/**
 * Load configuration. Never prints properties.
 *
 * Loads the main ESAPI properties (file I/O first, classpath as fallback),
 * then merges one or more validation property files on top of them. The
 * validation file list is either a single name or, when
 * VALIDATION_PROPERTIES_MULTIVALUED is set, a CSV list of names.
 *
 * @throws java.io.IOException
 *             if the file is inaccessible
 */
protected void loadConfiguration() throws IOException {
    try {
        //first attempt file IO loading of properties
        logSpecial("Attempting to load " + resourceFile + " via file I/O.");
        properties = loadPropertiesFromStream(getResourceStream(resourceFile), resourceFile);

    } catch (Exception iae) {
        //if file I/O loading fails, attempt classpath based loading next
        logSpecial("Loading " + resourceFile + " via file I/O failed. Exception was: " + iae);
        logSpecial("Attempting to load " + resourceFile + " via the classpath.");
        try {
            properties = loadConfigurationFromClasspath(resourceFile);
        } catch (Exception e) {
            // neither loading strategy worked: configuration is unusable
            logSpecial(resourceFile + " could not be loaded by any means. Fail.", e);
            throw new ConfigurationException(resourceFile + " could not be loaded by any means. Fail.", e);
        }
    }

    // if properties loaded properly above, get validation properties and merge them into the main properties
    if (properties != null) {
        final Iterator<String> validationPropFileNames;

        //defaults to single-valued for backwards compatibility
        final boolean multivalued = getESAPIProperty(VALIDATION_PROPERTIES_MULTIVALUED, false);
        final String validationPropValue = getESAPIProperty(VALIDATION_PROPERTIES, "validation.properties");

        if (multivalued) {
            // StrTokenizer implements ListIterator<String>, so the CSV
            // tokenizer itself serves as the iterator over file names.
            // the following cast warning goes away if the apache commons lib is updated to current version            
            validationPropFileNames = StrTokenizer.getCSVInstance(validationPropValue);
        } else {
            validationPropFileNames = Collections.singletonList(validationPropValue).iterator();
        }

        //clear any cached validation patterns so they can be reloaded from validation.properties
        patternCache.clear();
        while (validationPropFileNames.hasNext()) {
            String validationPropFileName = validationPropFileNames.next();
            Properties validationProperties = null;
            try {
                //first attempt file IO loading of properties
                logSpecial("Attempting to load " + validationPropFileName + " via file I/O.");
                validationProperties = loadPropertiesFromStream(getResourceStream(validationPropFileName),
                        validationPropFileName);

            } catch (Exception iae) {
                //if file I/O loading fails, attempt classpath based loading next
                logSpecial("Loading " + validationPropFileName + " via file I/O failed.");
                logSpecial("Attempting to load " + validationPropFileName + " via the classpath.");
                try {
                    validationProperties = loadConfigurationFromClasspath(validationPropFileName);
                } catch (Exception e) {
                    // NOTE(review): unlike the main resource file, a missing
                    // validation file is only logged, not fatal — presumably
                    // deliberate best-effort behavior; confirm before changing.
                    logSpecial(validationPropFileName + " could not be loaded by any means. fail.", e);
                }
            }

            // merge the loaded validation properties into the main set;
            // later files win on duplicate keys
            if (validationProperties != null) {
                Iterator<?> i = validationProperties.keySet().iterator();
                while (i.hasNext()) {
                    String key = (String) i.next();
                    String value = validationProperties.getProperty(key);
                    properties.put(key, value);
                }
            }

            if (shouldPrintProperties()) {

                //FIXME - make this chunk configurable
                /*
                 logSpecial("  ========Master Configuration========", null);
                 //logSpecial( "  ResourceDirectory: " + DefaultSecurityConfiguration.resourceDirectory );
                 Iterator j = new TreeSet( properties.keySet() ).iterator();
                 while (j.hasNext()) {
                     String key = (String)j.next();
                     // print out properties, but not sensitive ones like MasterKey and MasterSalt
                     if ( !key.contains( "Master" ) ) {
                logSpecial("  |   " + key + "=" + properties.get(key), null);
                    }
                 }
                 */
            }
        }
    }
}

From source file:org.ramadda.util.Utils.java

/**
 * _more_//from ww  w  . j a v  a2 s.  c  o  m
 *
 * @param line _more_
 * @param columnDelimiter _more_
 *
 * @return _more_
 */
/**
 * Splits a single line into column values using CSV-style quoting rules.
 * Empty fields are returned as "" rather than dropped.
 *
 * @param line the line to tokenize
 * @param columnDelimiter the delimiter to use; only its first character is
 *            honored (StrTokenizer delimits on a single char). When null,
 *            empty, or ",", the default comma delimiter is kept.
 * @return the column values, never null
 */
public static List<String> tokenizeColumns(String line, String columnDelimiter) {
    List<String> toks = new ArrayList<String>();
    StrTokenizer tokenizer = StrTokenizer.getCSVInstance(line);
    // report empty fields as null so we can normalize them to "" below
    tokenizer.setEmptyTokenAsNull(true);
    // Guard against an empty delimiter string: the original called
    // columnDelimiter.charAt(0) unconditionally and threw
    // StringIndexOutOfBoundsException (or NPE for null input).
    if ((columnDelimiter != null) && !columnDelimiter.isEmpty() && !columnDelimiter.equals(",")) {
        tokenizer.setDelimiterChar(columnDelimiter.charAt(0));
    }
    while (tokenizer.hasNext()) {
        String tok = tokenizer.nextToken();
        if (tok == null) {
            tok = "";
        }
        toks.add(tok);
    }

    return toks;
}