Usage examples for `org.apache.commons.vfs2` `FileContent.getInputStream()`.
/**
 * Returns an input stream for reading this file's content.
 *
 * @return the stream to read the file's content from
 * @throws FileSystemException if the content cannot be accessed
 */
InputStream getInputStream() throws FileSystemException;
From source file:fulcrum.xml.ParserTest.java
/**
 * Resolves {@code location} through Commons VFS and opens its content stream.
 * Returns {@code null} when the file does not exist, is not readable, or any
 * error occurs (the error is printed, not propagated).
 *
 * <p>Note: the FileObject is deliberately left open here; closing it would
 * also close the stream handed back to the caller.
 *
 * @param location a VFS-resolvable location (file:, http:, …)
 * @return an open input stream, or {@code null} if unavailable
 */
private static InputStream getInputStream(String location) {
    InputStream result = null;
    try {
        FileSystemManager manager = VFS.getManager();
        FileObject resolved = manager.resolveFile(location);
        boolean readable = resolved != null && resolved.exists() && resolved.isReadable();
        if (readable) {
            result = resolved.getContent().getInputStream();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return result;
}
From source file:gridool.sqlet.catalog.PartitioningConf.java
private static void loadSettingsFromCsv(String uri, List<Partition> list) throws SqletException { final InputStream is; try {//from ww w . j a v a 2 s .c o m FileSystemManager fsManager = VFS.getManager(); FileObject fileObj = fsManager.resolveFile(uri); FileContent fileContent = fileObj.getContent(); is = fileContent.getInputStream(); } catch (FileSystemException e) { throw new SqletException(SqletErrorType.configFailed, "failed to load a file: " + uri, e); } InputStreamReader reader = new InputStreamReader(new FastBufferedInputStream(is)); HeaderAwareCsvReader csvReader = new HeaderAwareCsvReader(reader, ',', '"'); final Map<String, Integer> headerMap; try { headerMap = csvReader.parseHeader(); } catch (IOException e) { throw new SqletException(SqletErrorType.configFailed, "failed to parse a header: " + uri, e); } final int[] fieldIndexes = toFieldIndexes(headerMap); final Map<GridNode, Partition> masterSlave = new HashMap<GridNode, Partition>(128); while (csvReader.next()) { String nodeStr = csvReader.get(fieldIndexes[0]); String masterStr = csvReader.get(fieldIndexes[1]); String dbUrl = csvReader.get(fieldIndexes[2]); String user = csvReader.get(fieldIndexes[3]); String password = csvReader.get(fieldIndexes[4]); String mapOutput = csvReader.get(fieldIndexes[5]); Preconditions.checkNotNull(nodeStr, dbUrl); GridNode node = GridUtils.getNode(nodeStr); Partition p = new Partition(node, dbUrl, user, password, mapOutput); if (masterStr == null || masterStr.length() == 0) { masterSlave.put(node, p); list.add(p); } else { GridNode master = GridUtils.getNode(masterStr); Partition masterPartition = masterSlave.get(master); if (masterPartition == null) { LOG.error("Master partition is not found for slave: " + p); } else { masterPartition.addSlave(p); } } } }
From source file:com.wipro.ats.bdre.tdimport.FileMonitor.java
private static void putEligibleFileInfoInMap(String fileName, FileContent fc) { // *Start* Eligible files moved to data structure for ingestion to HDFS FileCopyInfo fileCopyInfo = new FileCopyInfo(); try {/*from w w w . j a v a 2 s . co m*/ fileCopyInfo.setFileName(fileName); fileCopyInfo.setSubProcessId(TDImportRunnableMain.getSubProcessId()); fileCopyInfo.setServerId(Integer.toString(123461)); fileCopyInfo.setSrcLocation(fc.getFile().getName().getPath()); fileCopyInfo.setTdTable(TDImportRunnableMain.getTdTable()); fileCopyInfo.setFileHash(DigestUtils.md5Hex(fc.getInputStream())); fileCopyInfo.setFileSize(fc.getSize()); fileCopyInfo.setTimeStamp(fc.getLastModifiedTime()); fileCopyInfo.setTdDB(TDImportRunnableMain.getTdDB()); fileCopyInfo.setTdUserName(TDImportRunnableMain.getTdUserName()); fileCopyInfo.setTdPassword(TDImportRunnableMain.getTdPassword()); fileCopyInfo.setTdDelimiter(TDImportRunnableMain.getTdDelimiter()); fileCopyInfo.setTdTpdid(TDImportRunnableMain.getTdTpdid()); // putting element to structure addToQueue(fileName, fileCopyInfo); } catch (Exception err) { LOGGER.error("Error adding file to queue ", err); throw new BDREException(err); } // *End* Eligible files moved to data structure for ingestion to HDFS }
From source file:com.wipro.ats.bdre.filemon.FileMonitor.java
private static void putEligibleFileInfoInMap(String fileName, FileContent fc) { // *Start* Eligible files moved to data structure for ingestion to HDFS FileCopyInfo fileCopyInfo = new FileCopyInfo(); try {/*from w w w . java2s. c om*/ fileCopyInfo.setFileName(fileName); fileCopyInfo.setSubProcessId(FileMonRunnableMain.getSubProcessId()); fileCopyInfo.setServerId(Integer.toString(123461)); fileCopyInfo.setSrcLocation(fc.getFile().getName().getPath()); fileCopyInfo.setDstLocation(new java.io.File(fileName).getParent() .replace(FileMonRunnableMain.getMonitoredDirName(), FileMonRunnableMain.getHdfsUploadDir())); fileCopyInfo.setFileHash(DigestUtils.md5Hex(fc.getInputStream())); fileCopyInfo.setFileSize(fc.getSize()); fileCopyInfo.setTimeStamp(fc.getLastModifiedTime()); // putting element to structure addToQueue(fileName, fileCopyInfo); } catch (Exception err) { LOGGER.error("Error adding file to queue ", err); throw new BDREException(err); } // *End* Eligible files moved to data structure for ingestion to HDFS }
From source file:fulcrum.xml.soap12.Soap12XOMTest.java
@Test public void testXOM() { Document doc = null;// www . j av a 2s. c o m Builder parser = null; try { FileSystemManager fsManager = VFS.getManager(); FileObject fileObj = fsManager.resolveFile(SIMPLE); if (fileObj != null && fileObj.exists() && fileObj.isReadable()) { FileContent content = fileObj.getContent(); InputStream is = content.getInputStream(); LOGGER.info("STARTING PARSE:"); parser = new Builder(); doc = parser.build(is); LOGGER.info("ENDING PARSE"); System.out.println(doc.toXML()); } } catch (Exception e) { e.printStackTrace(); } finally { parser = null; } }
From source file:fulcrum.xml.Parser.java
/** * Only provides parsing functions to the "fulcrum.xml" package. * /*www .java2s . c o m*/ * @see Document * * @param fileLocation * @param document * @throws ParseException */ protected void parse(String fileLocation, Document document) throws ParseException { FileObject fileObj = null; try { fileObj = fsManager.resolveFile(fileLocation); if (fileObj != null && fileObj.exists() && fileObj.isReadable()) { FileContent content = fileObj.getContent(); InputStream is = content.getInputStream(); int size = is.available(); LOGGER.debug("Total File Size: " + size + " bytes"); byte[] buffer = new byte[size]; is.read(buffer, 0, size); LOGGER.debug("Start parsing"); parse(buffer, document); LOGGER.debug("Finished paring"); buffer = null; content.close(); fileObj.close(); } } catch (Exception e) { throw new ParseException(e); } }
From source file:gridool.sqlet.catalog.MapReduceConf.java
public void loadReducers(String uri) throws SqletException { if (uri.endsWith(".csv") || uri.endsWith(".CSV")) { final InputStream is; try {//w w w . ja va 2 s . c om FileSystemManager fsManager = VFS.getManager(); FileObject fileObj = fsManager.resolveFile(uri); FileContent fileContent = fileObj.getContent(); is = fileContent.getInputStream(); } catch (FileSystemException e) { throw new SqletException(SqletErrorType.configFailed, "failed to load a file: " + uri, e); } InputStreamReader reader = new InputStreamReader(new FastBufferedInputStream(is)); HeaderAwareCsvReader csvReader = new HeaderAwareCsvReader(reader, ',', '"'); final Map<String, Integer> headerMap; try { headerMap = csvReader.parseHeader(); } catch (IOException e) { throw new SqletException(SqletErrorType.configFailed, "failed to parse a header: " + uri, e); } final int[] fieldIndexes = toFieldIndexes(headerMap); while (csvReader.next()) { String id = csvReader.get(fieldIndexes[0]); String nodeStr = csvReader.get(fieldIndexes[1]); String dbUrl = csvReader.get(fieldIndexes[2]); String user = csvReader.get(fieldIndexes[3]); String password = csvReader.get(fieldIndexes[4]); String xferPortStr = csvReader.get(fieldIndexes[5]); String shuffleDataSinkStr = csvReader.get(fieldIndexes[6]); String[] shuffleDataSink = FileUtils.parsePathExpressions(shuffleDataSinkStr, ","); Preconditions.checkNotNull(id, nodeStr); GridNode hostNode = GridUtils.getNode(nodeStr); int xferPort = (xferPortStr == null || xferPortStr.isEmpty()) ? GridXferService.DEFAULT_RECV_PORT : Integer.parseInt(xferPortStr); Reducer r = new Reducer(id, hostNode, dbUrl, user, password, xferPort, shuffleDataSink); reducers.add(r); } } else { throw new IllegalArgumentException("Unsupported URI: " + uri); } }
From source file:com.carrotgarden.nexus.example.script.ScriptStorageImpl.java
private void updateScript(final FileObject file) { final FileName name = file.getName(); log.info("New script file found: " + name); String script;// w w w.j a v a 2s .c om try { final FileContent content = file.getContent(); script = IOUtil.toString(content.getInputStream()); content.close(); } catch (final IOException e) { log.warn("Unable to read script file: " + name, e); return; } synchronized (scriptStore) { scriptStore.put(getName(name), script); } }
From source file:cz.lbenda.dataman.db.ExtConfFactory.java
/**
 * Load extend configuration to given database configuration.
 *
 * Three branches, selected by exConf.getSrc():
 *  - blank src: load exConf directly via loadExConfType(exConf);
 *  - "db://<table>" src: query "<table>" for (usr, exConf) rows with a prepared
 *    statement; a row whose usr is null/'' is a default that a user-specific row
 *    overrides; the chosen text is parsed via loadExConfType(StringReader);
 *  - otherwise: treat src as a VFS location and, when it exists and has no
 *    children, read it on a background Thread via content.getInputStream().
 *
 * NOTE(review): the bare "StringUtils.isBlank(null);" call in the db:// branch
 * is a no-op — presumably leftover debug code; confirm and remove.
 * NOTE(review): in the VFS branch the background thread closes content but the
 * FileObject itself is never closed — possible handle leak; verify.
 * NOTE(review): file.getChildren() is called twice and is used to reject
 * folders; confirm it behaves as intended for non-folder files on all schemes.
 */
public void load() { if (exConf != null && StringUtils.isBlank(exConf.getSrc())) { loadExConfType(exConf); } else if (exConf != null) { if (exConf.getSrc().startsWith("db://")) { String path = exConf.getSrc().substring(5, exConf.getSrc().length()); dbConfig.getConnectionProvider() .onPreparedStatement(String.format( "select usr, exConf from %s where (usr = ? or usr is null or usr = '')", path), tuple2 -> { PreparedStatement ps = tuple2.get1(); String extendConfiguration = null; try { ps.setString(1, dbConfig.getConnectionProvider().getUser().getUsername()); try (ResultSet rs = ps.executeQuery()) { while (rs.next()) { if (rs.getString(1) == null && extendConfiguration == null) { // The null user is used only if no specific user configuration is read extendConfiguration = rs.getString(2); } else if (rs.getString(1) != null) { extendConfiguration = rs.getString(2); } } } } catch (SQLException e) { LOG.error("Problem with read extend config from table: " + exConf.getSrc(), e); ExceptionMessageFrmController.showException( "Problem with read extend config from table: " + exConf.getSrc(), e); } if (!StringUtils.isBlank(extendConfiguration)) { loadExConfType(new StringReader(extendConfiguration)); } else { StringUtils.isBlank(null); } }); } else { try { FileSystemManager fsManager = VFS.getManager(); FileObject file = fsManager.resolveFile(exConf.getSrc()); if (!file.exists()) { ExceptionMessageFrmController.showException("File not exist: " + exConf.getSrc()); } else if (file.getChildren() == null || file.getChildren().length == 0) { new Thread(() -> { try { FileContent content = file.getContent(); loadExConfType(new InputStreamReader(content.getInputStream())); content.close(); } catch (FileSystemException e) { LOG.error("Problem with read extend config from file: " + exConf.getSrc(), e); ExceptionMessageFrmController.showException( "Problem with read extend config from file: " + 
exConf.getSrc(), e); } }).start(); } else { ExceptionMessageFrmController .showException("The file type isn't supported: " + exConf.getSrc()); } } catch (FileSystemException e) { LOG.error("Problem with read extend config from file: " + exConf.getSrc(), e); ExceptionMessageFrmController .showException("Problem with read extend config from file: " + exConf.getSrc(), e); } } } }
From source file:mondrian.spi.impl.ApacheVfs2VirtualFileHandler.java
/**
 * Opens {@code url} as an Apache Commons VFS location (relative paths resolve
 * against the current working directory) and returns a stream over its content.
 *
 * NOTE(review): file.close() runs in the finally BEFORE getInputStream() is
 * called on the captured FileContent. This appears to rely on VFS reopening
 * the content on demand after a close — confirm against the VFS version in
 * use; if close invalidates the content, the stream open below would fail.
 *
 * @param url virtual file system URL (file:, http:, …)
 * @return a stream over the file's content
 * @throws FileSystemException if resolution or content access fails
 */
public InputStream readVirtualFile(String url) throws FileSystemException { // Treat catalogUrl as an Apache VFS (Virtual File System) URL. // VFS handles all of the usual protocols (http:, file:) // and then some. FileSystemManager fsManager = VFS.getManager(); if (fsManager == null) { throw Util.newError("Cannot get virtual file system manager"); } File userDir = new File("").getAbsoluteFile(); FileObject file = fsManager.resolveFile(userDir, url); FileContent fileContent = null; try { if (!file.isReadable()) { throw Util.newError("Virtual file is not readable: " + url); } fileContent = file.getContent(); } finally { file.close(); } if (fileContent == null) { throw Util.newError("Cannot get virtual file content: " + url); } return fileContent.getInputStream(); }