Example usage for org.apache.commons.csv CSVFormat DEFAULT

List of usage examples for org.apache.commons.csv CSVFormat DEFAULT

Introduction

In this page you can find the example usage for org.apache.commons.csv CSVFormat DEFAULT.

Prototype

CSVFormat DEFAULT

To view the source code for org.apache.commons.csv CSVFormat DEFAULT, click the Source Link below.

Click Source Link

Document

Standard comma-separated format, as defined in RFC 4180, but allowing empty lines.

Usage

From source file:com.denimgroup.threadfix.service.waflog.RiverbedWebAppFirewallLogParser.java

/**
 * Parses a single CSV-encoded Riverbed WAF log line into a {@link SecurityEvent}.
 *
 * @param entry one raw log line; {@code null}, empty, and comment lines
 *              (starting with {@code "#"}) are ignored
 * @return a populated SecurityEvent, or {@code null} when the line is skipped,
 *         unparseable, from an unknown handler/component, already imported,
 *         or matches no known WAF rule
 */
@Override
public SecurityEvent getSecurityEvent(String entry) {
    if (entry == null || entry.isEmpty() || entry.startsWith("#")) {
        return null;
    }

    // a logline is a csv encoded line with the following columns
    //  * [0] a timestamp: YYYYMMDD-HHMMSS in local time
    //  * [1] an internal session id or "default"
    //  * [2] internal cluster node id
    //  * [3] host header
    //  * [4] client ip
    //  * [5] HTTP method
    //  * [6] URL
    //  * [7] HTTP protocol version
    //  * [8] internal ruleset / rule id
    //  * [9] action
    //  * [10] protection or detection mode
    //  * [11] request or response
    //  * [12] handlerName - we only care for the THREADFIX_HANDLER_NAME here
    //  * [13] component which reject the request
    //  * [14] value which rejects the request
    //  * [15] (not used by this parser)
    //  * [16] error id (use this together with the timestamp to be unique)
    //  * [17] free text field
    //  * ... additional stuff

    try {
        // we are using an iterator here because this
        // is the interface of this CSV parser
        // however, we always feed only one line into
        // this parser so it is ok to return from this
        // loop and never continue
        Iterable<CSVRecord> parser = CSVFormat.DEFAULT.parse(new StringReader(entry));
        for (CSVRecord record : parser) {

            // We access elements 0 .. 17 later, so this has to have at least 18 elements
            if (record.size() < 18) {
                log.error("can't parse logline: " + entry);
                return null;
            }
            String csvTimestamp = record.get(0); // 20140131-172342
            String csvClientIP = record.get(4); // 10.17.23.41
            String csvRulesetMode = record.get(10); // P or D
            String csvHandlerName = record.get(12); // ThreadfixHandler
            String csvComponentName = record.get(13); // protection_ruleset
            String csvComponentValue = record.get(14); // threadfix:100042 or 100042
            String csvErrorId = record.get(16); // 1234567
            String csvFreeText = record.get(17); // free text which describe the action

            // defensive: treat any missing column as an unparseable line
            if (csvTimestamp == null || csvClientIP == null || csvHandlerName == null || csvRulesetMode == null
                    || csvComponentName == null || csvComponentValue == null || csvErrorId == null
                    || csvFreeText == null) {

                log.error("can't parse logline: " + entry);
                return null;
            }

            // we only care for THREADFIX_HANDLER_NAME here ... ignore all other stuff
            if (!csvHandlerName.equals(THREADFIX_HANDLER_NAME)) {
                log.debug("ignore unknown handler: " + csvHandlerName);
                return null;
            }

            // while the error id act more or less as
            // a unique id for rejected requests, this id
            // is too short to be really unique over a
            // long time. So we combine it here with the
            // timestamp to get a better native id
            String nativeId = csvTimestamp + "-" + csvErrorId;

            log.debug("native id: " + nativeId);

            // skip events that were already imported for this WAF
            if (securityEventDao.retrieveByNativeIdAndWafId(nativeId, wafId) != null) {
                return null;
            }

            String wafRuleId = null;
            if (csvComponentName.equals(THREADFIX_HANDLER_COMPONENT)) {
                // allow threadfix:123456 and 123456
                if (csvComponentValue.contains(":")) {
                    wafRuleId = csvComponentValue.split(":", 2)[1];
                } else {
                    wafRuleId = csvComponentValue;
                }
            } else {
                log.debug("ignore unknown component: " + csvComponentName);
                return null;
            }

            log.debug("wafRuleId " + wafRuleId);

            WafRule rule = wafRuleDao.retrieveByWafAndNativeId(wafId, wafRuleId);
            if (rule == null) {
                log.debug("wafRule not found");
                return null;
            }

            Calendar calendar = parseDate(csvTimestamp);

            if (calendar == null) {
                log.error("can't parse logline (timestamp): " + entry);
                return null;
            }

            SecurityEvent event = new SecurityEvent();

            event.setWafRule(rule);
            event.setImportTime(calendar);
            event.setLogText(csvFreeText);

            // NOTE(review): mode-dependent attack type below is disabled;
            // "deny" is always reported regardless of csvRulesetMode (P or D).
            event.setAttackType("deny");
            //if (csvRulesetMode == WAF_LOG_MODE_PROTECTION)
            //{
            //    event.setAttackType("deny");
            //} else {
            //    event.setAttackType("log"); 
            //}
            event.setNativeId(nativeId);
            event.setAttackerIP(csvClientIP);

            return event;
        }
    } catch (IOException e) {
        // a single in-memory line failed to parse as CSV; treat as unparseable
        return null;
    }
    return null;

}

From source file:com.adobe.acs.commons.exporters.impl.users.UserExportServletTest.java

/**
 * Exercises the user export servlet with an empty group filter and the
 * "allusers" scope, and verifies all known users appear in the CSV output.
 */
@Test
public void testWithGroupBothFIlter() throws Exception {
    // NOTE(review): "FIlter" typo kept to preserve the public test method name.

    // Build parameters: no group filter, "allusers" scope.
    JsonObject params = buildParameterObject("", "allusers");

    Map<String, Object> parameters = new HashMap<>();
    parameters.put("params", params);

    context.request().setParameterMap(parameters);
    servlet.doGet(context.request(), context.response());

    // JUnit convention: expected value first, actual second.
    assertEquals(200, context.response().getStatus());
    String output = context.response().getOutputAsString();

    // Close the parser so its underlying reader is released even when an
    // assertion fails.
    try (CSVParser parser = CSVParser.parse(output, CSVFormat.DEFAULT.withHeader())) {
        assertAllUsersPresent(parser.getRecords(), "alice", "bob", "charly");
    }
}

From source file:com.streamsets.pipeline.lib.parser.delimited.TestDelimitedCharDataParser.java

/**
 * Parses "A,B\na,b" with the header row ignored and verifies offsets,
 * field values, and the absence of header metadata on each field.
 */
@Test
public void testParseIgnoreHeader() throws Exception {
    OverrunReader reader = new OverrunReader(new StringReader("A,B\na,b"), 1000, true, false);
    DataParser parser = new DelimitedCharDataParser(getContext(), "id", reader, 0, 0, CSVFormat.DEFAULT,
            CsvHeader.IGNORE_HEADER, -1, CsvRecordType.LIST);
    // Ensure the parser is closed even if an assertion fails mid-test.
    try {
        // Offset starts past the ignored header line ("A,B\n" = 4 chars).
        Assert.assertEquals("4", parser.getOffset());
        Record record = parser.parse();
        Assert.assertNotNull(record);
        Assert.assertEquals("id::4", record.getHeader().getSourceId());
        Assert.assertEquals("a",
                record.get().getValueAsList().get(0).getValueAsMap().get("value").getValueAsString());
        // Header metadata must be absent because the header row was ignored.
        Assert.assertFalse(record.has("[0]/header"));
        Assert.assertEquals("b",
                record.get().getValueAsList().get(1).getValueAsMap().get("value").getValueAsString());
        Assert.assertFalse(record.has("[1]/header"));
        Assert.assertEquals("7", parser.getOffset());
        record = parser.parse();
        // End of input: no more records, offset reports -1.
        Assert.assertNull(record);
        Assert.assertEquals("-1", parser.getOffset());
    } finally {
        parser.close();
    }
}

From source file:com.itemanalysis.jmetrik.file.JmetrikFileWriter.java

/**
 * Opens a connection to the file by instantiating a OutputStreamWriter and a CSVPrinter.
 * Assumes the output file is not a temporary file that should be deleted. The output file
 * is a permanent file./* w w w . jav  a 2 s. c om*/
 *
 * @throws IOException
 */
public void openConnection() throws IOException {
    writer = new BufferedWriter(new OutputStreamWriter(Files.newOutputStream(file)));
    printer = new CSVPrinter(writer, CSVFormat.DEFAULT.withCommentMarker('#'));
}

From source file:com.thinkbiganalytics.discovery.parsers.csv.CSVAutoDetect.java

/**
 * Guesses the field delimiter by comparing per-character counts of the first
 * line against subsequent lines, optionally validating candidates by parsing
 * the sample with a header row.
 *
 * @param lineStats per-line delimiter statistics for the sample
 * @param value     the raw sample text to test-parse
 * @param quote     the quote character to use while test-parsing
 * @param headerRow whether the first record should be treated as a header
 * @return the best delimiter candidate, or null if none could be determined
 * @throws IOException if test-parsing the sample fails
 */
private Character guessDelimiter(List<LineStats> lineStats, String value, Character quote, boolean headerRow)
        throws IOException {

    // Assume delimiter exists in first line and compare to subsequent lines
    if (lineStats.size() > 0) {
        LineStats firstLineStat = lineStats.get(0);
        Map<Character, Integer> firstLineDelimCounts = firstLineStat.calcDelimCountsOrdered();
        if (firstLineDelimCounts != null && firstLineDelimCounts.size() > 0) {
            List<Character> candidates = new ArrayList<>();
            // Attempt to parse given delimiter
            Set<Character> firstLineDelimKeys = firstLineDelimCounts.keySet();
            for (Character delim : firstLineDelimKeys) {
                CSVFormat format;
                if (headerRow) {
                    format = CSVFormat.DEFAULT.withFirstRecordAsHeader().withDelimiter(delim).withQuote(quote);
                } else {
                    format = CSVFormat.DEFAULT.withDelimiter(delim).withQuote(quote);
                }
                try (StringReader sr = new StringReader(value)) {
                    try (CSVParser parser = format.parse(sr)) {
                        // If a header was parsed and every record has the same
                        // width as the header, this delimiter is conclusive.
                        if (parser.getHeaderMap() != null) {
                            int size = parser.getHeaderMap().size();
                            List<CSVRecord> records = parser.getRecords();
                            boolean match = records.stream().allMatch(record -> record.size() == size);
                            if (match) {
                                return delim;
                            }
                        }
                    }
                }
                // Otherwise keep the delimiter as a candidate when its count is
                // identical on every line of the sample.
                Integer delimCount = firstLineDelimCounts.get(delim);
                boolean match = true;
                for (int i = 1; i < lineStats.size() && match; i++) {
                    LineStats thisLine = lineStats.get(i);
                    Integer rowDelimCount = thisLine.delimStats.get(delim);
                    match = delimCount.equals(rowDelimCount);
                }
                if (match) {
                    candidates.add(delim);
                }
            }
            if (candidates.size() > 0) {
                // All agree on a single delimiter
                if (candidates.size() == 1) {
                    return candidates.get(0);
                }
                // Multiple candidates: firstLineDelimKeys is ordered by the
                // first line's counts, so return the first key that survived
                // as a candidate. (The previous code indexed `candidates`
                // positionally while iterating the key set, which could throw
                // IndexOutOfBoundsException and never checked membership.)
                for (Character delim : firstLineDelimKeys) {
                    if (candidates.contains(delim)) {
                        return delim;
                    }
                }
            }
        }
    }
    return null;
}

From source file:acmi.l2.clientmod.xdat.XdatEditor.java

/**
 * Loads the bundled /versions.csv resource and registers each
 * (name, className) version pair with the controller.
 * Failures are logged and surfaced to the user via a warning dialog.
 */
private void loadSchema() {
    String versionsFilePath = "/versions.csv";
    // Pin UTF-8 so the bundled resource is decoded the same way on every
    // platform instead of using the default charset.
    try (CSVParser parser = new CSVParser(
            new InputStreamReader(getClass().getResourceAsStream(versionsFilePath),
                    java.nio.charset.StandardCharsets.UTF_8),
            CSVFormat.DEFAULT)) {
        for (CSVRecord record : parser.getRecords()) {
            String name = record.get(0);
            String className = record.get(1);
            controller.registerVersion(name, className);
        }
    } catch (Exception e) {
        // Missing resource (NPE from getResourceAsStream) lands here too.
        log.log(Level.WARNING, versionsFilePath + " read error", e);
        Dialogs.show(Alert.AlertType.WARNING, e.getClass().getSimpleName(), null, e.getMessage());
    }
}

From source file:com.joeyfrazee.nifi.processors.DuplicateByAttribute.java

/**
 * Clones the incoming FlowFile once per CSV value found in the attribute
 * named by ATTRIBUTE_TO_DUPLICATE_BY, writing each value into
 * OUTPUT_ATTRIBUTE on its clone. Clones go to REL_SUCCESS, the original to
 * REL_ORIGINAL; on any parse failure the original goes to REL_FAILURE.
 */
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    String attributeToDuplicateBy = context.getProperty(ATTRIBUTE_TO_DUPLICATE_BY)
            .evaluateAttributeExpressions(flowFile).getValue();

    String outputAttribute = context.getProperty(OUTPUT_ATTRIBUTE).evaluateAttributeExpressions(flowFile)
            .getValue();

    try {
        final String csv = flowFile.getAttribute(attributeToDuplicateBy);
        // try-with-resources: the parser (and its underlying reader) must be
        // closed; the original code leaked it.
        try (CSVParser parser = CSVParser.parse(csv, CSVFormat.DEFAULT)) {
            for (final CSVRecord record : parser) {
                for (final String v : record) {
                    FlowFile copy = session.clone(flowFile);
                    // The clone carries the output value instead of the source list.
                    copy = session.removeAttribute(copy, attributeToDuplicateBy);
                    copy = session.putAttribute(copy, outputAttribute, v);
                    session.transfer(copy, REL_SUCCESS);
                }
            }
        }
    } catch (Exception e) {
        getLogger().error("{} value {} could not be parsed",
                new Object[] { ATTRIBUTE_TO_DUPLICATE_BY.getName(), attributeToDuplicateBy }, e);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    session.transfer(flowFile, REL_ORIGINAL);
}

From source file:edu.harvard.mcz.imagecapture.loader.JobVerbatimFieldLoad.java

@Override
public void start() {
    startDateTime = new Date();
    Singleton.getSingletonInstance().getJobList().addJob((RunnableJob) this);
    runStatus = RunStatus.STATUS_RUNNING;

    String selectedFilename = "";

    if (file == null) {
        final JFileChooser fileChooser = new JFileChooser();
        fileChooser.setFileSelectionMode(JFileChooser.FILES_AND_DIRECTORIES);
        if (Singleton.getSingletonInstance().getProperties().getProperties()
                .getProperty(ImageCaptureProperties.KEY_LASTLOADPATH) != null) {
            fileChooser.setCurrentDirectory(new File(Singleton.getSingletonInstance().getProperties()
                    .getProperties().getProperty(ImageCaptureProperties.KEY_LASTLOADPATH)));
        }/*ww  w . j ava2  s .  c  o m*/

        int returnValue = fileChooser.showOpenDialog(Singleton.getSingletonInstance().getMainFrame());
        if (returnValue == JFileChooser.APPROVE_OPTION) {
            file = fileChooser.getSelectedFile();
        }
    }

    if (file != null) {
        log.debug("Selected file to load: " + file.getName() + ".");

        if (file.exists() && file.isFile() && file.canRead()) {
            // Save location
            Singleton.getSingletonInstance().getProperties().getProperties()
                    .setProperty(ImageCaptureProperties.KEY_LASTLOADPATH, file.getPath());
            selectedFilename = file.getName();

            String[] headers = new String[] {};

            CSVFormat csvFormat = CSVFormat.DEFAULT.withHeader(headers);
            int rows = 0;
            try {
                rows = readRows(file, csvFormat);
            } catch (FileNotFoundException e) {
                JOptionPane.showMessageDialog(Singleton.getSingletonInstance().getMainFrame(),
                        "Unable to load data, file not found: " + e.getMessage(), "Error: File Not Found",
                        JOptionPane.OK_OPTION);
                errors.append("File not found ").append(e.getMessage()).append("\n");
                log.error(e.getMessage(), e);
            } catch (IOException e) {
                errors.append("Error loading csv format, trying tab delimited: ").append(e.getMessage())
                        .append("\n");
                log.debug(e.getMessage());
                try {
                    // try reading as tab delimited format, if successful, use that format.
                    CSVFormat tabFormat = CSVFormat.newFormat('\t').withIgnoreSurroundingSpaces(true)
                            .withHeader(headers).withQuote('"');
                    rows = readRows(file, tabFormat);
                    csvFormat = tabFormat;
                } catch (IOException e1) {
                    errors.append("Error Loading data: ").append(e1.getMessage()).append("\n");
                    log.error(e.getMessage(), e1);
                }
            }

            try {
                Reader reader = new FileReader(file);

                CSVParser csvParser = new CSVParser(reader, csvFormat);

                Map<String, Integer> csvHeader = csvParser.getHeaderMap();
                headers = new String[csvHeader.size()];
                int i = 0;
                for (String header : csvHeader.keySet()) {
                    headers[i++] = header;
                    log.debug(header);
                }

                boolean okToRun = true;
                //TODO: Work picking/checking responsibility into a FieldLoaderWizard
                List<String> headerList = Arrays.asList(headers);
                if (!headerList.contains("barcode")) {
                    log.error("Input file " + file.getName()
                            + " header does not contain required field 'barcode'.");
                    // no barcode field, we can't match the input to specimen records.
                    errors.append("Field \"barcode\" not found in csv file headers.  Unable to load data.")
                            .append("\n");
                    okToRun = false;
                }

                if (okToRun) {

                    Iterator<CSVRecord> iterator = csvParser.iterator();

                    FieldLoader fl = new FieldLoader();

                    if (headerList.size() == 3 && headerList.contains("verbatimUnclassifiedText")
                            && headerList.contains("questions") && headerList.contains("barcode")) {
                        log.debug("Input file matches case 1: Unclassified text only.");
                        // Allowed case 1a: unclassified text only

                        int confirm = JOptionPane.showConfirmDialog(
                                Singleton.getSingletonInstance().getMainFrame(),
                                "Confirm load from file " + selectedFilename + " (" + rows
                                        + " rows) with just barcode and verbatimUnclassifiedText",
                                "Verbatim unclassified Field found for load", JOptionPane.OK_CANCEL_OPTION);
                        if (confirm == JOptionPane.OK_OPTION) {
                            String barcode = "";
                            int lineNumber = 0;
                            while (iterator.hasNext()) {
                                lineNumber++;
                                counter.incrementSpecimens();
                                CSVRecord record = iterator.next();
                                try {
                                    String verbatimUnclassifiedText = record.get("verbatimUnclassifiedText");
                                    barcode = record.get("barcode");
                                    String questions = record.get("questions");

                                    fl.load(barcode, verbatimUnclassifiedText, questions, true);
                                    counter.incrementSpecimensUpdated();
                                } catch (IllegalArgumentException e) {
                                    RunnableJobError error = new RunnableJobError(file.getName(), barcode,
                                            Integer.toString(lineNumber), e.getClass().getSimpleName(), e,
                                            RunnableJobError.TYPE_LOAD_FAILED);
                                    counter.appendError(error);
                                    log.error(e.getMessage(), e);
                                } catch (LoadException e) {
                                    RunnableJobError error = new RunnableJobError(file.getName(), barcode,
                                            Integer.toString(lineNumber), e.getClass().getSimpleName(), e,
                                            RunnableJobError.TYPE_LOAD_FAILED);
                                    counter.appendError(error);
                                    log.error(e.getMessage(), e);
                                }
                                percentComplete = (int) ((lineNumber * 100f) / rows);
                                this.setPercentComplete(percentComplete);
                            }
                        } else {
                            errors.append("Load canceled by user.").append("\n");
                        }
                    } else if (headerList.size() == 4 && headerList.contains("verbatimUnclassifiedText")
                            && headerList.contains("questions") && headerList.contains("barcode")
                            && headerList.contains("verbatimClusterIdentifier")) {
                        log.debug(
                                "Input file matches case 1: Unclassified text only (with cluster identifier).");
                        // Allowed case 1b: unclassified text only (including cluster identifier)

                        int confirm = JOptionPane.showConfirmDialog(
                                Singleton.getSingletonInstance().getMainFrame(),
                                "Confirm load from file " + selectedFilename + " (" + rows
                                        + " rows) with just barcode and verbatimUnclassifiedText",
                                "Verbatim unclassified Field found for load", JOptionPane.OK_CANCEL_OPTION);
                        if (confirm == JOptionPane.OK_OPTION) {
                            String barcode = "";
                            int lineNumber = 0;
                            while (iterator.hasNext()) {
                                lineNumber++;
                                counter.incrementSpecimens();
                                CSVRecord record = iterator.next();
                                try {
                                    String verbatimUnclassifiedText = record.get("verbatimUnclassifiedText");
                                    String verbatimClusterIdentifier = record.get("verbatimClusterIdentifier");
                                    barcode = record.get("barcode");
                                    String questions = record.get("questions");

                                    fl.load(barcode, verbatimUnclassifiedText, verbatimClusterIdentifier,
                                            questions, true);
                                    counter.incrementSpecimensUpdated();
                                } catch (IllegalArgumentException e) {
                                    RunnableJobError error = new RunnableJobError(file.getName(), barcode,
                                            Integer.toString(lineNumber), e.getClass().getSimpleName(), e,
                                            RunnableJobError.TYPE_LOAD_FAILED);
                                    counter.appendError(error);
                                    log.error(e.getMessage(), e);
                                } catch (LoadException e) {
                                    RunnableJobError error = new RunnableJobError(file.getName(), barcode,
                                            Integer.toString(lineNumber), e.getClass().getSimpleName(), e,
                                            RunnableJobError.TYPE_LOAD_FAILED);
                                    counter.appendError(error);
                                    log.error(e.getMessage(), e);
                                }
                                percentComplete = (int) ((lineNumber * 100f) / rows);
                                this.setPercentComplete(percentComplete);
                            }
                        } else {
                            errors.append("Load canceled by user.").append("\n");
                        }

                    } else if (headerList.size() == 8 && headerList.contains("verbatimUnclassifiedText")
                            && headerList.contains("questions") && headerList.contains("barcode")
                            && headerList.contains("verbatimLocality") && headerList.contains("verbatimDate")
                            && headerList.contains("verbatimNumbers")
                            && headerList.contains("verbatimCollector")
                            && headerList.contains("verbatimCollection")) {
                        // Allowed case two, transcription into verbatim fields, must be exact list of all
                        // verbatim fields, not including cluster identifier or other metadata.
                        log.debug("Input file matches case 2: Full list of verbatim fields.");

                        int confirm = JOptionPane.showConfirmDialog(
                                Singleton.getSingletonInstance().getMainFrame(),
                                "Confirm load from file " + selectedFilename + " (" + rows
                                        + " rows) with just barcode and verbatim fields.",
                                "Verbatim Fields found for load", JOptionPane.OK_CANCEL_OPTION);
                        if (confirm == JOptionPane.OK_OPTION) {

                            String barcode = "";
                            int lineNumber = 0;
                            while (iterator.hasNext()) {
                                lineNumber++;
                                counter.incrementSpecimens();
                                CSVRecord record = iterator.next();
                                try {
                                    String verbatimLocality = record.get("verbatimLocality");
                                    String verbatimDate = record.get("verbatimDate");
                                    String verbatimCollector = record.get("verbatimCollector");
                                    String verbatimCollection = record.get("verbatimCollection");
                                    String verbatimNumbers = record.get("verbatimNumbers");
                                    String verbatimUnclasifiedText = record.get("verbatimUnclassifiedText");
                                    barcode = record.get("barcode");
                                    String questions = record.get("questions");

                                    fl.load(barcode, verbatimLocality, verbatimDate, verbatimCollector,
                                            verbatimCollection, verbatimNumbers, verbatimUnclasifiedText,
                                            questions);
                                    counter.incrementSpecimensUpdated();
                                } catch (IllegalArgumentException e) {
                                    RunnableJobError error = new RunnableJobError(file.getName(), barcode,
                                            Integer.toString(lineNumber), e.getClass().getSimpleName(), e,
                                            RunnableJobError.TYPE_LOAD_FAILED);
                                    counter.appendError(error);
                                    log.error(e.getMessage(), e);
                                } catch (LoadException e) {
                                    RunnableJobError error = new RunnableJobError(file.getName(), barcode,
                                            Integer.toString(lineNumber), e.getClass().getSimpleName(), e,
                                            RunnableJobError.TYPE_LOAD_FAILED);
                                    counter.appendError(error);
                                    log.error(e.getMessage(), e);
                                }
                                percentComplete = (int) ((lineNumber * 100f) / rows);
                                this.setPercentComplete(percentComplete);
                            }
                        } else {
                            errors.append("Load canceled by user.").append("\n");
                        }

                    } else {
                        // allowed case three, transcription into arbitrary sets verbatim or other fields
                        log.debug("Input file case 3: Arbitrary set of fields.");

                        // Check column headers before starting run.
                        boolean headersOK = false;

                        try {
                            HeaderCheckResult headerCheck = fl.checkHeaderList(headerList);
                            if (headerCheck.isResult()) {
                                int confirm = JOptionPane.showConfirmDialog(
                                        Singleton.getSingletonInstance().getMainFrame(),
                                        "Confirm load from file " + selectedFilename + " (" + rows
                                                + " rows) with headers: \n"
                                                + headerCheck.getMessage().replaceAll(":", ":\n"),
                                        "Fields found for load", JOptionPane.OK_CANCEL_OPTION);
                                if (confirm == JOptionPane.OK_OPTION) {
                                    headersOK = true;
                                } else {
                                    errors.append("Load canceled by user.").append("\n");
                                }
                            } else {
                                int confirm = JOptionPane.showConfirmDialog(
                                        Singleton.getSingletonInstance().getMainFrame(),
                                        "Problem found with headers in file, try to load anyway?\nHeaders: \n"
                                                + headerCheck.getMessage().replaceAll(":", ":\n"),
                                        "Problem in fields for load", JOptionPane.OK_CANCEL_OPTION);
                                if (confirm == JOptionPane.OK_OPTION) {
                                    headersOK = true;
                                } else {
                                    errors.append("Load canceled by user.").append("\n");
                                }
                            }
                        } catch (LoadException e) {
                            errors.append("Error loading data: \n").append(e.getMessage()).append("\n");
                            JOptionPane.showMessageDialog(Singleton.getSingletonInstance().getMainFrame(),
                                    e.getMessage().replaceAll(":", ":\n"), "Error Loading Data: Problem Fields",
                                    JOptionPane.ERROR_MESSAGE);

                            log.error(e.getMessage(), e);
                        }

                        if (headersOK) {
                            int lineNumber = 0;
                            while (iterator.hasNext()) {
                                lineNumber++;
                                Map<String, String> data = new HashMap<String, String>();
                                CSVRecord record = iterator.next();
                                String barcode = record.get("barcode");
                                Iterator<String> hi = headerList.iterator();
                                boolean containsNonVerbatim = false;
                                while (hi.hasNext()) {
                                    String header = hi.next();
                                    // Skip any fields prefixed by the underscore character _
                                    if (!header.equals("barcode") && !header.startsWith("_")) {
                                        data.put(header, record.get(header));
                                        if (!header.equals("questions")
                                                && MetadataRetriever.isFieldExternallyUpdatable(Specimen.class,
                                                        header)
                                                && MetadataRetriever.isFieldVerbatim(Specimen.class, header)) {
                                            containsNonVerbatim = true;
                                        }
                                    }
                                }
                                if (data.size() > 0) {
                                    try {
                                        boolean updated = false;
                                        if (containsNonVerbatim) {
                                            updated = fl.loadFromMap(barcode, data,
                                                    WorkFlowStatus.STAGE_CLASSIFIED, true);
                                        } else {
                                            updated = fl.loadFromMap(barcode, data,
                                                    WorkFlowStatus.STAGE_VERBATIM, true);
                                        }
                                        counter.incrementSpecimens();
                                        if (updated) {
                                            counter.incrementSpecimensUpdated();
                                        }
                                    } catch (HibernateException e1) {
                                        // Catch (should just be development) problems with the underlying query 
                                        StringBuilder message = new StringBuilder();
                                        message.append("Query Error loading row (").append(lineNumber)
                                                .append(")[").append(barcode).append("]")
                                                .append(e1.getMessage());
                                        RunnableJobError err = new RunnableJobError(selectedFilename, barcode,
                                                Integer.toString(lineNumber), e1.getMessage(), e1,
                                                RunnableJobError.TYPE_LOAD_FAILED);
                                        counter.appendError(err);
                                        log.error(e1.getMessage(), e1);

                                    } catch (LoadException e) {
                                        StringBuilder message = new StringBuilder();
                                        message.append("Error loading row (").append(lineNumber).append(")[")
                                                .append(barcode).append("]").append(e.getMessage());

                                        RunnableJobError err = new RunnableJobError(selectedFilename, barcode,
                                                Integer.toString(lineNumber), e.getMessage(), e,
                                                RunnableJobError.TYPE_LOAD_FAILED);

                                        counter.appendError(err);
                                        // errors.append(message.append("\n").toString());
                                        log.error(e.getMessage(), e);
                                    }
                                }
                                percentComplete = (int) ((lineNumber * 100f) / rows);
                                this.setPercentComplete(percentComplete);
                            }
                        } else {
                            String message = "Can't load data, problem with headers.";
                            errors.append(message).append("\n");
                            log.error(message);
                        }
                    }
                }
                csvParser.close();
                reader.close();
            } catch (FileNotFoundException e) {
                JOptionPane.showMessageDialog(Singleton.getSingletonInstance().getMainFrame(),
                        "Unable to load data, file not found: " + e.getMessage(), "Error: File Not Found",
                        JOptionPane.OK_OPTION);
                errors.append("File not found ").append(e.getMessage()).append("\n");
                log.error(e.getMessage(), e);
            } catch (IOException e) {
                errors.append("Error Loading data: ").append(e.getMessage()).append("\n");
                log.error(e.getMessage(), e);
            }
        }

    } else {
        //TODO: handle error condition
        log.error("File selection cancelled by user.");
    }

    report(selectedFilename);
    done();
}

From source file:com.publictransitanalytics.scoregenerator.datalayer.directories.GTFSReadingStopTimesDirectory.java

/**
 * Parses a GTFS frequencies file and records each frequency-based trip
 * entry, keyed by its trip id, into the supplied multimap builder.
 *
 * @param builder           receives one (trip_id, FrequencyRecord) entry per CSV row
 * @param frequenciesReader reader over the frequencies CSV; first row must be a header
 *                          containing trip_id, start_time, end_time, and headway_secs
 * @throws IOException if the reader cannot be read or the CSV is malformed
 */
private void parseFrequenciesFile(final ImmutableMultimap.Builder<String, FrequencyRecord> builder,
        final Reader frequenciesReader) throws FileNotFoundException, IOException {

    // try-with-resources: CSVParser is Closeable and was previously leaked;
    // closing it also closes the underlying reader.
    try (final CSVParser frequenciesParser = new CSVParser(frequenciesReader,
            CSVFormat.DEFAULT.withHeader())) {
        final List<CSVRecord> frequenciesRecords = frequenciesParser.getRecords();

        for (final CSVRecord record : frequenciesRecords) {
            final String tripId = record.get("trip_id");

            final FrequencyRecord frequencyRecord = new FrequencyRecord(tripId,
                    TransitTime.parse(record.get("start_time")), TransitTime.parse(record.get("end_time")),
                    Duration.ofSeconds(Long.parseLong(record.get("headway_secs"))));
            builder.put(tripId, frequencyRecord);
        }
    }
}

From source file:com.publictransitanalytics.scoregenerator.datalayer.directories.GTFSReadingServiceTypeCalendar.java

/**
 * Parses a GTFS calendar_dates file and applies each service exception to
 * the date-to-service-type multimap: exception type 1 adds the service on
 * that date, type 2 removes it.
 *
 * @param calendarDatesReader reader over the calendar_dates CSV; first row must be a
 *                            header containing service_id, date, and exception_type
 * @param serviceTypesMap     mutated in place with the service additions/removals
 * @throws IOException if the reader cannot be read or the CSV is malformed
 * @throws ScoreGeneratorFatalException if a row has an exception_type other than 1 or 2
 */
private void parseCalendarDatesFile(final Reader calendarDatesReader,
        final Multimap<LocalDate, String> serviceTypesMap) throws FileNotFoundException, IOException {

    // try-with-resources: CSVParser is Closeable and was previously leaked;
    // closing it also closes the underlying reader.
    try (final CSVParser calendarDatesParser = new CSVParser(calendarDatesReader,
            CSVFormat.DEFAULT.withHeader())) {
        final List<CSVRecord> calendarDatesRecords = calendarDatesParser.getRecords();

        for (final CSVRecord record : calendarDatesRecords) {
            final String serviceType = record.get("service_id");
            // GTFS dates are YYYYMMDD, which matches BASIC_ISO_DATE.
            final LocalDate date = LocalDate.parse(record.get("date"), DateTimeFormatter.BASIC_ISO_DATE);
            final String exceptionType = record.get("exception_type");

            switch (exceptionType) {
            case "1":
                serviceTypesMap.put(date, serviceType);
                break;
            case "2":
                serviceTypesMap.remove(date, serviceType);
                break;
            default:
                throw new ScoreGeneratorFatalException(String.format("Invalid exception type %s", exceptionType));
            }
        }
    }
}