List of usage examples for org.apache.commons.io LineIterator.hasNext().
Signature: public boolean hasNext()
Indicates whether the underlying Reader has more lines.
From source file: com.jpetrak.gate.scala.ScalaScriptPR.java
public void tryCompileScript() { String scalaProgramSource;//from w ww. ja v a2s . c o m String className; if (classloader != null) { Gate.getClassLoader().forgetClassLoader(classloader); } classloader = Gate.getClassLoader().getDisposableClassLoader( //"C"+java.util.UUID.randomUUID().toString().replaceAll("-", ""), scalaProgramUrl.toExternalForm() + System.currentTimeMillis(), this.getClass().getClassLoader(), true); try { className = "ScalaScriptClass" + getNextId(); StringBuilder sb = new StringBuilder(); scalaProgramLines = new ArrayList<String>(); scalaProgramLines.add(fileProlog); scalaProgramLines.add(classProlog.replaceAll("THECLASSNAME", className)); LineIterator it = FileUtils.lineIterator(scalaProgramFile, "UTF-8"); try { while (it.hasNext()) { String line = it.nextLine(); scalaProgramLines.add(line); } } finally { LineIterator.closeQuietly(it); } scalaProgramLines.add(scalaCompiler.getClassEpilog().replaceAll("THECLASSNAME", className)); for (String line : scalaProgramLines) { sb.append(line); sb.append("\n"); } scalaProgramSource = sb.toString(); //System.out.println("Program Source: " + scalaProgramSource); } catch (IOException ex) { System.err.println("Problem reading program from " + scalaProgramUrl); ex.printStackTrace(System.err); return; } try { //System.out.println("Trying to compile ..."); scalaProgramClass = scalaCompiler.compile(className, scalaProgramSource, classloader); //scalaProgramClass = (ScalaScript) Gate.getClassLoader(). // loadClass("scalascripting." 
+ className).newInstance(); scalaProgramClass.globalsForPr = globalsForPr; scalaProgramClass.lockForPr = new Object(); if (registeredEditorVR != null) { registeredEditorVR.setCompilationOk(); } scalaProgramClass.resource1 = resource1; scalaProgramClass.resource2 = resource2; scalaProgramClass.resource3 = resource3; isCompileError = false; scalaProgramClass.resetInitAll(); } catch (Exception ex) { System.err.println("Problem compiling ScalaScript Class"); printScalaProgram(System.err); ex.printStackTrace(System.err); if (classloader != null) { Gate.getClassLoader().forgetClassLoader(classloader); classloader = null; } isCompileError = true; scalaProgramClass = null; if (registeredEditorVR != null) { registeredEditorVR.setCompilationError(); } return; } }
From source file:net.mindengine.blogix.web.routes.DefaultRoutesParser.java
@Override public List<Route> parseRoutes(File file) throws IOException { LineIterator it = FileUtils.lineIterator(file, "UTF-8"); List<Route> routes = new LinkedList<Route>(); Route currentRoute = null;/*from www . ja v a 2s. co m*/ while (it.hasNext()) { String line = it.nextLine(); if (line.trim().isEmpty()) { currentRoute = null; } else if (line.startsWith(" ")) { loadModelEntryForRoute(currentRoute, line); } else { currentRoute = parseLine(line.trim()); if (currentRoute != null) { routes.add(currentRoute); } } } return routes; }
From source file:at.sti2.spark.streamer.SparkStreamer.java
private void stream(File fileToStream) { PrintWriter streamWriter = null; LineIterator lineIterator = null; long Counter = 0; try {//from w ww . j a v a2 s .c o m sock = new Socket("localhost", port); } catch (IOException e) { logger.debug("Cannot connect to server."); System.exit(1); } logger.info("Connected."); try { streamWriter = new PrintWriter(sock.getOutputStream()); lineIterator = FileUtils.lineIterator(fileToStream, "UTF-8"); logger.info("Beginning to stream."); Date startStreaming = new Date(); String line = null; while (lineIterator.hasNext()) { line = lineIterator.nextLine(); streamWriter.println(line); Counter++; // if (tripleCounter%1000 == 0){ // long currentTimepoint = (new Date()).getTime(); // System.out.println("Processing " + (1000/(currentTimepoint - timepoint)) + " triples/sec."); // timepoint = currentTimepoint; // streamWriter.flush(); // } } streamWriter.flush(); Date endStreaming = new Date(); logger.info("End of streaming."); logger.info("Streamed " + Counter + " triples/lines."); logger.info("Total streaming time " + (endStreaming.getTime() - startStreaming.getTime()) + " ms."); } catch (IOException e) { logger.error(e.getMessage()); } finally { IOUtils.closeQuietly(streamWriter); lineIterator.close(); logger.info("Disconnected."); } }
From source file:dk.netarkivet.harvester.datamodel.PartialHarvest.java
/** * This method is a duplicate of the addSeeds method but for seedsFile parameter * * @param seedsFile a newline-separated File containing the seeds to be added * @param templateName the name of the template to be used * @param maxBytes Maximum number of bytes to harvest per domain * @param maxObjects Maximum number of objects to harvest per domain *//*w w w. j a v a 2 s . co m*/ public void addSeedsFromFile(File seedsFile, String templateName, long maxBytes, int maxObjects, Map<String, String> attributeValues) { ArgumentNotValid.checkNotNull(seedsFile, "seeds"); ArgumentNotValid.checkTrue(seedsFile.isFile(), "seedsFile does not exist"); ArgumentNotValid.checkNotNullOrEmpty(templateName, "templateName"); if (!TemplateDAO.getInstance().exists(templateName)) { throw new UnknownID("No such template: " + templateName); } Map<String, Set<String>> acceptedSeeds = new HashMap<String, Set<String>>(); StringBuilder invalidMessage = new StringBuilder( "Unable to create an event harvest.\n" + "The following seeds are invalid:\n"); boolean valid = true; // validate all the seeds in the file // those accepted are entered into the acceptedSeeds datastructure // Iterate through the contents of the file LineIterator seedIterator = null; try { seedIterator = new LineIterator(new FileReader(seedsFile)); while (seedIterator.hasNext()) { String seed = seedIterator.next(); boolean seedValid = processSeed(seed, invalidMessage, acceptedSeeds); if (!seedValid) { valid = false; } } } catch (IOException e) { throw new IOFailure("Unable to process seedsfile ", e); } finally { LineIterator.closeQuietly(seedIterator); } if (!valid) { throw new ArgumentNotValid(invalidMessage.toString()); } addSeedsToDomain(templateName, maxBytes, maxObjects, acceptedSeeds, attributeValues); }
From source file:ke.co.tawi.babblesms.server.servlet.upload.UploadUtil.java
protected void saveContacts(File contactFile, Account account, ContactDAO contactDAO, PhoneDAO phoneDAO, List<String> groupUuids, ContactGroupDAO contactGroupDAO) { LineIterator lineIterator = null; Contact contact;// ww w . ja va 2s . c om Phone phone; List<Group> groupList = new LinkedList<>(); Group grp; for (String uuid : groupUuids) { grp = new Group(); grp.setUuid(uuid); groupList.add(grp); } try { lineIterator = FileUtils.lineIterator(contactFile, "UTF-8"); String line; String[] rowTokens, phoneTokens, networkTokens; while (lineIterator.hasNext()) { line = lineIterator.nextLine(); rowTokens = StringUtils.split(line, ','); // Extract the Contact and save contact = new Contact(); contact.setAccountUuid(account.getUuid()); contact.setName(rowTokens[0]); contact.setStatusUuid(Status.ACTIVE); contactDAO.putContact(contact); // Extract the phones and save phoneTokens = StringUtils.split(rowTokens[1], ';'); networkTokens = StringUtils.split(rowTokens[2], ';'); String network; for (int j = 0; j < phoneTokens.length; j++) { phone = new Phone(); phone.setPhonenumber(StringUtils.trimToEmpty(phoneTokens[j])); phone.setPhonenumber(StringUtils.remove(phone.getPhonenumber(), ' ')); phone.setContactUuid(contact.getUuid()); phone.setStatusuuid(Status.ACTIVE); network = StringUtils.lowerCase(StringUtils.trimToEmpty(networkTokens[j])); phone.setNetworkuuid(networkUuidArray[networkList.indexOf(network)]); phoneDAO.putPhone(phone); } // Associate the Contact to the Groups for (Group group : groupList) { contactGroupDAO.putContact(contact, group); } } // end 'while (lineIterator.hasNext())' } catch (IOException e) { logger.error("IOException when storing: " + contactFile); logger.error(e); } finally { if (lineIterator != null) { lineIterator.close(); } } }
From source file:com.adobe.acs.tools.csv_asset_importer.impl.CsvAssetImporterServlet.java
/** * Adds a populated terminating field to the ends of CSV entries. * If the last entry in a CSV row is empty, the CSV library has difficulty understanding that is the end of the row. * * @param is the CSV file as an inputstream * @param separator The field separator/*from w w w .j a v a 2 s . c o m*/ * @param charset The charset * @return An inputstream that is the same as is, but each line has a populated line termination entry * @throws IOException */ private InputStream terminateLines(final InputStream is, final char separator, final String charset) throws IOException { final ByteArrayOutputStream baos = new ByteArrayOutputStream(); final PrintStream printStream = new PrintStream(baos); final LineIterator lineIterator = IOUtils.lineIterator(is, charset); while (lineIterator.hasNext()) { String line = StringUtils.stripToNull(lineIterator.next()); if (line != null) { line += separator + TERMINATED; printStream.println(line); } } return new ByteArrayInputStream(baos.toByteArray()); }
From source file:de.tudarmstadt.lt.seg.app.Segmenter.java
private void run_sequential_line() throws Exception { ISentenceSplitter sentenceSplitter = newSentenceSplitter(); ITokenizer tokenizer = newTokenizer(); InputStream in = System.in; if (!"-".equals(_filename_in)) in = new FileInputStream(_filename_in); LineIterator liter = new LineIterator( new BufferedReader(new InputStreamReader(in, Charset.defaultCharset()))); OutputStream out = System.out; if (!"-".equals(_filename_out)) out = new FileOutputStream(_filename_out); PrintWriter w = new PrintWriter(new OutputStreamWriter(out, Charset.defaultCharset())); for (long lc = 0; liter.hasNext();) { if (++lc % 1000 == 0) System.err.format("Processing line %d ('%s')%n", lc, _filename_in); String l = liter.next().replace("\\t", "\t").replace("\\n", "\n"); split_and_tokenize(new StringReader(l), String.format("%s:%d", _filename_in, lc), sentenceSplitter, tokenizer, _level_filter, _level_normalize, _merge_types, _merge_tokens, _separator_sentence, _separator_token, _separator_desc, w); }/* ww w . j a va2 s . c o m*/ }
From source file:de.tudarmstadt.lt.lm.app.GenerateNgramIndex.java
public void create_vocabulary_index(File vocabulary_file) throws IOException { File index_dir = new File(_index_dir, "vocab"); if (index_dir.exists()) { LOG.info("Vocabulary index already exists in directory '{}'.", index_dir.getAbsolutePath()); if (_overwrite) { LOG.info("Overwriting index '{}',", index_dir); index_dir.delete();/* w w w . j ava 2 s . co m*/ } else return; } index_dir.mkdirs(); Analyzer analyzer = new KeywordAnalyzer(); IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_4_9, analyzer); iwc.setOpenMode(OpenMode.CREATE); iwc.setRAMBufferSizeMB(1024.0); Directory directory = new MMapDirectory(index_dir); IndexWriter writer_vocab = new IndexWriter(directory, iwc); InputStream in = new FileInputStream(vocabulary_file); if (vocabulary_file.getName().endsWith(".gz")) in = new GZIPInputStream(in); LineIterator iter = new LineIterator(new BufferedReader(new InputStreamReader(in, "UTF-8"))); Document doc = new Document(); Field f_word = new StringField("word", "", Field.Store.YES); doc.add(f_word); long c = 0; while (iter.hasNext()) { if (++c % 10000 == 0) LOG.info("Adding {}'th word.", c); String line = iter.next(); try { String word = line.trim(); f_word.setStringValue(word); writer_vocab.addDocument(doc); } catch (Exception e) { LOG.warn("Could not process line '{}' in file '{}', malformed line.", line, vocabulary_file, e); } } writer_vocab.forceMergeDeletes(); writer_vocab.commit(); writer_vocab.close(); }
From source file:egovframework.rte.fdl.filehandling.FilehandlingServiceTest.java
/** * @throws Exception/*w ww .j a v a2s. c o m*/ */ @Test public void testLineIterator() throws Exception { String[] string = { "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"", " xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">", " <parent>", " <groupId>egovframework.rte</groupId>", " <artifactId>egovframework.rte.root</artifactId>", " <version>1.0.0-SNAPSHOT</version>", " </parent>", " <modelVersion>4.0.0</modelVersion>", " <groupId>egovframework.rte</groupId>", " <artifactId>egovframework.rte.fdl.filehandling</artifactId>", " <packaging>jar</packaging>", " <version>1.0.0-SNAPSHOT</version>", " <name>egovframework.rte.fdl.filehandling</name>", " <url>http://maven.apache.org</url>", " <dependencies>", " <dependency>", " <groupId>junit</groupId>", " <artifactId>junit</artifactId>", " <version>4.4</version>", " <scope>test</scope>", " </dependency>", " <dependency>", " <groupId>commons-vfs</groupId>", " <artifactId>commons-vfs</artifactId>", " <version>1.0</version>", " </dependency>", " <dependency>", " <groupId>commons-io</groupId>", " <artifactId>commons-io</artifactId>", " <version>1.4</version>", " </dependency>", " <!-- egovframework.rte -->", " <dependency>", " <groupId>egovframework.rte</groupId>", " <artifactId>egovframework.rte.fdl.string</artifactId>", " <version>1.0.0-SNAPSHOT</version>", " </dependency>", " </dependencies>", "</project>" }; try { File file = new File("pom.xml"); LineIterator it = FileUtils.lineIterator(file, "UTF-8"); try { log.debug("############################# LineIterator ###############################"); for (int i = 0; it.hasNext(); i++) { String line = it.nextLine(); log.info(line); assertEquals(string[i], line); } } finally { LineIterator.closeQuietly(it); } } catch (Exception e) { log.error(e.getCause()); } }
From source file:fr.ericlab.sondy.core.DataManipulation.java
public StopWords getStopwords(AppVariables appVariables, String name) { StopWords stopWords = new StopWords(); LineIterator it = null; try {//from w ww .j a v a 2 s.c o m it = FileUtils.lineIterator(new File(appVariables.configuration.getWorkspace() + "/stopwords/" + name), "UTF-8"); while (it.hasNext()) { stopWords.add(it.nextLine()); } } catch (IOException ex) { Logger.getLogger(DataManipulation.class.getName()).log(Level.SEVERE, null, ex); } finally { LineIterator.closeQuietly(it); } return stopWords; }