Usage examples for the Apache Commons IO method org.apache.commons.io.LineIterator.closeQuietly.
public static void closeQuietly(LineIterator iterator)
From source file:nl.knaw.huygens.timbuctoo.tools.importer.neww.WomenWritersImporter.java
/**
 * Imports collective entities from "collectives.json" (one JSON object per line).
 * Lines that are empty after preprocessing are skipped.
 *
 * @return the number of references added by this import
 * @throws Exception if reading or handling a collective fails
 */
private int importCollectives() throws Exception {
    int sizeBefore = references.size();
    LineIterator it = getLineIterator("collectives.json");
    try {
        while (it.hasNext()) {
            String json = preprocessJson(it.nextLine());
            if (!json.isEmpty()) {
                handleCollective(json);
            }
        }
    } finally {
        LineIterator.closeQuietly(it);
    }
    return references.size() - sizeBefore;
}
From source file:nl.knaw.huygens.timbuctoo.tools.importer.neww.WomenWritersImporter.java
/**
 * Imports source documents from "documents.json". For each document with a
 * usable source title, stores a WWDocument marked as source, registers a
 * reference under a "SourceDocument" key, and relates it to its source
 * category keyword when one is defined.
 *
 * @return the number of references added by this import
 * @throws Exception if reading or storing a document fails; on a JSON mapping
 *         error the offending line is echoed before rethrowing
 */
private int importSourceDocuments() throws Exception {
    int sizeBefore = references.size();
    Reference categoryRelationRef = relationTypes.get("hasSourceCategory");
    List<String> ignoredCategories = Lists.newArrayList("", "-", "TBD");
    LineIterator it = getLineIterator("documents.json");
    String current = "";
    try {
        while (it.hasNext()) {
            current = preprocessJson(it.nextLine());
            if (current.isEmpty()) {
                continue;
            }
            XDocument parsed = objectMapper.readValue(preprocessDocumentJson(current), XDocument.class);
            if (parsed == null || parsed.source == null) {
                continue;
            }
            String title = filterField(parsed.source.full_name);
            String key = newKey("SourceDocument", title);
            // Skip junk titles and documents we have already registered.
            if (title == null || title.startsWith(JUNK_SOURCE) || references.containsKey(key)) {
                continue;
            }
            WWDocument document = new WWDocument();
            document.setSource(true);
            document.setTitle(title);
            document.setNotes(filterNotesField(parsed.source.notes));
            String storedId = addDomainEntity(WWDocument.class, document);
            Reference documentRef = new Reference(Document.class, storedId);
            references.put(key, documentRef);
            String category = StringUtils.trimToEmpty(parsed.source.type);
            Reference keywordRef = keywords.lookup(DOC_SOURCE_TYPE, category);
            if (keywordRef != null) {
                addRelation(WWRelation.class, categoryRelationRef, documentRef, keywordRef, change, "");
            } else if (!ignoredCategories.contains(category)) {
                System.out.printf("Undefined source category [%s] for [%s]%n", category, title);
            }
        }
    } catch (JsonMappingException e) {
        // Echo the offending line to aid debugging before propagating.
        System.out.println(current);
        throw e;
    } finally {
        LineIterator.closeQuietly(it);
    }
    return references.size() - sizeBefore;
}
From source file:nl.knaw.huygens.timbuctoo.tools.importer.neww.WomenWritersImporter.java
/**
 * Imports regular (non-source) documents from "documents.json", using a
 * publisher normalizer seeded from "publishers.txt". The normalizer field is
 * cleared again when the import finishes.
 *
 * @return the number of references added by this import
 * @throws Exception if reading or handling a document fails; on a JSON mapping
 *         error the offending line is echoed before rethrowing
 */
private int importRegularDocuments() throws Exception {
    int sizeBefore = references.size();
    documentTypeMap = createDocumentTypeMap();
    File publisherFile = new File(inputDir, "publishers.txt");
    publisherNormalizer = new PublisherNormalizer(publisherFile);
    LineIterator it = getLineIterator("documents.json");
    String current = "";
    try {
        while (it.hasNext()) {
            current = preprocessJson(it.nextLine());
            if (current.isEmpty()) {
                continue;
            }
            handleDocument(preprocessDocumentJson(current));
        }
    } catch (JsonMappingException e) {
        // Echo the offending line to aid debugging before propagating.
        System.out.println(current);
        throw e;
    } finally {
        LineIterator.closeQuietly(it);
        publisherNormalizer = null;
    }
    return references.size() - sizeBefore;
}
From source file:nl.knaw.huygens.timbuctoo.tools.importer.neww.WomenWritersImporter.java
/**
 * Imports keyword entities from "keywords.json" (one JSON object per line).
 * Lines that are empty after preprocessing are skipped.
 *
 * @return the number of references added by this import
 * @throws Exception if reading or handling a keyword fails
 */
private int importKeywords() throws Exception {
    int sizeBefore = references.size();
    LineIterator it = getLineIterator("keywords.json");
    try {
        while (it.hasNext()) {
            String json = preprocessJson(it.nextLine());
            if (!json.isEmpty()) {
                handleKeyword(json);
            }
        }
    } finally {
        LineIterator.closeQuietly(it);
    }
    return references.size() - sizeBefore;
}
From source file:nl.knaw.huygens.timbuctoo.tools.importer.neww.WomenWritersImporter.java
private int importLanguages() throws Exception { int initialSize = references.size(); Map<String, String> map = createNameCodeMap(); LineIterator iterator = getLineIterator("languages.json"); String line = ""; try {//from w ww. j a v a2s .c om System.out.printf("\"language\",\"ISO-code\",\"ISO-name\"%n"); while (iterator.hasNext()) { line = preprocessJson(iterator.nextLine()); if (!line.isEmpty()) { XLanguage object = objectMapper.readValue(line, XLanguage.class); String name = verifyNonEmptyField(line, "name", filterField(object.name)); if (name == null) { invalids.add(newKey("Language", object.tempid)); } else { String code = mapName(map, name); // Get WWLanguage instance with values of primitive entity Language WWLanguage language = repository.getLanguageByCode(WWLanguage.class, code); if (language == null) { verifyNonEmptyField(line, "name", null); } else { String flag = name.equals(language.getName()) ? "" : " *"; System.out.printf("%-30s%-8s%-30s%s%n", name, language.getCode(), language.getName(), flag); language.setCore(true); // TODO prevent multiple updates for same language updateProjectDomainEntity(WWLanguage.class, language); String key = newKey("Language", object.tempid); storeReference(key, WWLanguage.class, language.getId()); } } } } } catch (JsonMappingException e) { System.out.println(line); throw e; } finally { LineIterator.closeQuietly(iterator); } return references.size() - initialSize; }
From source file:nl.knaw.huygens.timbuctoo.tools.importer.neww.WomenWritersImporter.java
/**
 * Imports location entities from "locations.json" after setting up the
 * concordance; the concordance is cleared again when the import finishes.
 *
 * @return the number of references added by this import
 * @throws Exception if reading or handling a location fails
 */
private int importLocations() throws Exception {
    setupConc();
    int sizeBefore = references.size();
    LineIterator it = getLineIterator("locations.json");
    try {
        while (it.hasNext()) {
            String json = preprocessJson(it.nextLine());
            if (!json.isEmpty()) {
                handleLocation(preprocessLocation(json));
            }
        }
    } finally {
        conc.clear();
        LineIterator.closeQuietly(it);
    }
    return references.size() - sizeBefore;
}
From source file:nl.knaw.huygens.timbuctoo.tools.importer.neww.WomenWritersImporter.java
private int importPersons(Set<String> collaboratorIds) throws Exception { int initialSize = references.size(); ckccMap = ckccConcordance();// w w w.j a va2 s. co m LineIterator iterator = getLineIterator("persons.json"); String line = ""; try { while (iterator.hasNext()) { line = preprocessJson(iterator.nextLine()); if (!line.isEmpty()) { handlePerson(preprocessPerson(line), collaboratorIds); } } } catch (JsonMappingException e) { System.out.println(line); throw e; } finally { LineIterator.closeQuietly(iterator); } for (String type : types) { System.out.printf("type %s%n", type); } lines.clear(); return references.size() - initialSize; }
From source file:nl.knaw.huygens.timbuctoo.tools.importer.neww.WomenWritersImporter.java
/**
 * Scans "relations.json" for "collaborated_with" relations and collects the
 * ids of the persons on the left-hand side of those relations.
 *
 * @return the set of collaborator person ids (possibly empty)
 * @throws Exception if reading or parsing the relations file fails; the
 *         offending line is echoed before rethrowing
 */
private Set<String> collectCollaborators() throws Exception {
    Set<String> ids = Sets.newHashSet();
    LineIterator iterator = getLineIterator("relations.json");
    String line = "";
    try {
        while (iterator.hasNext()) {
            line = preprocessJson(iterator.nextLine());
            if (!line.isEmpty()) {
                XRelation object = objectMapper.readValue(line, XRelation.class);
                String relationType = filterField(object.relation_type);
                if ("collaborated_with".equals(relationType)) {
                    String leftObject = verifyNonEmptyField(line, "leftObject", filterField(object.leftObject));
                    String leftId = verifyNonEmptyField(line, "leftId", filterField(object.leftId));
                    if ("Person".equals(leftObject)) {
                        ids.add(leftId);
                    }
                }
            }
        }
    } catch (Exception e) {
        // FIX: the original caught Exception only to rethrow it unchanged,
        // discarding the tracked line. Echo it for diagnostics, consistent
        // with the sibling import methods, before propagating.
        System.out.println(line);
        throw e;
    } finally {
        LineIterator.closeQuietly(iterator);
    }
    return ids;
}
From source file:nl.knaw.huygens.timbuctoo.tools.importer.neww.WomenWritersImporter.java
/**
 * Imports relations from "relations.json" using the relation type
 * concordance, which is cleared again when the import finishes. On any
 * failure the offending line is echoed before rethrowing.
 *
 * @throws Exception if reading or handling a relation fails
 */
private void importRelations() throws Exception {
    relationTypeConcordance = getRelationTypeConcordance();
    LineIterator it = getLineIterator("relations.json");
    String current = "";
    try {
        while (it.hasNext()) {
            current = preprocessJson(it.nextLine());
            if (current.isEmpty()) {
                continue;
            }
            handleRelation(current);
        }
    } catch (Exception e) {
        // Echo the offending line to aid debugging before propagating.
        System.out.println(current);
        throw e;
    } finally {
        LineIterator.closeQuietly(it);
        relationTypeConcordance = null;
    }
}
From source file:org.adf.emg.sonar.ojaudit.JavaMetricsDecorator.java
@Override public void decorate(Resource resource, DecoratorContext context) { if (!(Qualifiers.isFile(resource) && resource.getName().endsWith(".java"))) { // only process .java files return;//from ww w.j av a 2 s .com } ProjectFileSystem fileSystem = context.getProject().getFileSystem(); File file = lookup(resource, fileSystem); LineIterator iterator = null; int numLines = 0; int numBlankLines = 0; int numCommentLines = 0; try { Charset charset = fileSystem.getSourceCharset(); iterator = charset == null ? FileUtils.lineIterator(file) : FileUtils.lineIterator(file, charset.name()); boolean inComment = false; while (iterator.hasNext()) { String trimmedLine = iterator.nextLine().trim(); numLines++; boolean lineHasCode = false; boolean lineHasComment = false; while (!trimmedLine.isEmpty()) { if (inComment) { // in a comment. try to find end marker int endIndex = trimmedLine.indexOf(END_COMMENT); if (endIndex == -1) { // (rest of) line is comment lineHasComment = true; trimmedLine = ""; // remove comment } else { // remove comment to see if there is code after it lineHasComment = true; trimmedLine = trimmedLine.substring(endIndex + END_COMMENT.length()); inComment = false; } } else { // not in a comment if (trimmedLine.startsWith("//")) { trimmedLine = ""; continue; } // try to find begin comment marker int startIndex = trimmedLine.indexOf(START_COMMENT); if (startIndex == -1) { // (rest of) line is non-comment lineHasCode = true; trimmedLine = ""; // remove non-comment } else if (startIndex == 0) { // line starts with start marker inComment = true; trimmedLine = trimmedLine.substring(startIndex + START_COMMENT.length()); } else { // line contains start marker lineHasCode = true; inComment = true; trimmedLine = trimmedLine.substring(startIndex + START_COMMENT.length()); } } trimmedLine = trimmedLine.trim(); } if (!lineHasCode) { if (lineHasComment) { numCommentLines++; } else { numBlankLines++; } } } } catch (IOException e) { LOG.warn("error reading " + file + " to 
collect metrics", e); } finally { LineIterator.closeQuietly(iterator); } context.saveMeasure(CoreMetrics.LINES, (double) numLines); // Lines context.saveMeasure(CoreMetrics.COMMENT_LINES, (double) numCommentLines); // Non Commenting Lines of Code context.saveMeasure(CoreMetrics.NCLOC, (double) numLines - numBlankLines - numCommentLines); // Comment Lines }