List of usage examples for com.fasterxml.jackson.databind MappingIterator hasNext
@Override public boolean hasNext()
From source file:io.fabric8.forge.rest.model.Models.java
/** * Saves the json object to the given file *///from w w w . jav a 2 s.c o m public static <T> List<T> loadJsonValues(File json, Class<T> clazz) throws IOException { List<T> answer = new ArrayList<>(); if (json.exists() && json.isFile()) { MappingIterator<T> iter = objectMapper.readerFor(clazz).readValues(json); while (iter.hasNext()) { answer.add(iter.next()); } } return answer; }
From source file:io.fabric8.devops.ProjectConfigs.java
/**
 * Parses a YAML file containing a sequence of values of the given type.
 *
 * @param file  the YAML file to read
 * @param clazz the element type each YAML document is bound to
 * @return the list of parsed values, in file order
 * @throws IOException if the file cannot be read or parsed
 */
static <T> List<T> parseYamlValues(File file, Class<T> clazz) throws IOException {
    ObjectMapper mapper = createObjectMapper();
    List<T> answer = new ArrayList<>();
    // try-with-resources: the previous code leaked the iterator's underlying stream.
    try (MappingIterator<T> iter = mapper.readerFor(clazz).readValues(file)) {
        while (iter.hasNext()) {
            answer.add(iter.next());
        }
    }
    return answer;
}
From source file:org.jongo.spike.MongoDumpTest.java
@Test public void importBsonDumpFileIntoCollection() throws Exception { InputStream bsonDump = getClass().getClassLoader().getResourceAsStream("1000friends.bson"); BsonFactory bsonFactory = new BsonFactory(); //bsonFactory.enable(BsonParser.Feature.HONOR_DOCUMENT_LENGTH); // fails when enabled ObjectReader reader = new ObjectMapper(bsonFactory).reader(BasicBSONObject.class); MappingIterator<BSONObject> iterator = reader.readValues(bsonDump); try {// w w w . j av a2 s . c o m while (iterator.hasNext()) { BSONObject bsonObject = iterator.next(); collection.withWriteConcern(WriteConcern.SAFE).save(bsonObject); } } finally { iterator.close(); } assertThat(collection.count()).isEqualTo(1000); }
From source file:com.marklogic.entityservices.e2e.CSVLoader.java
public void go() throws InterruptedException { logger.info("job started."); File dir = new File(projectDir + "/data/superstore-csv"); WriteHostBatcher batcher = moveMgr.newWriteHostBatcher().withBatchSize(100).withThreadCount(10) .onBatchSuccess((client, batch) -> logger.info(getSummaryReport(batch))) .onBatchFailure((client, batch, throwable) -> { logger.warn("FAILURE on batch:" + batch.toString() + "\n", throwable); throwable.printStackTrace(); });/*from ww w .ja va2 s . co m*/ ticket = moveMgr.startJob(batcher); try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir.toPath(), "*.csv")) { for (Path entry : stream) { logger.debug("Adding " + entry.getFileName().toString()); MappingIterator<ObjectNode> it = csvMapper.readerFor(ObjectNode.class).with(bootstrapSchema) .readValues(entry.toFile()); long i = 0; while (it.hasNext()) { ObjectNode jsonNode = it.next(); String jsonString = mapper.writeValueAsString(jsonNode); String uri = entry.toUri().toString() + "-" + Long.toString(i++) + ".json"; DocumentMetadataHandle metadata = new DocumentMetadataHandle() // .withCollections("raw", "csv") // .withPermission("nwind-reader", Capability.READ) // .withPermission("nwind-writer", Capability.INSERT, Capability.UPDATE); batcher.add(uri, metadata, new StringHandle(jsonString)); if (i % 1000 == 0) logger.debug("Inserting JSON document " + uri); } it.close(); } } catch (IOException e) { e.printStackTrace(); } batcher.flush(); }
From source file:com.marklogic.entityservices.examples.CSVLoader.java
public void go() throws InterruptedException { logger.info("job started."); File dir = new File(projectDir + "/data/third-party/csv"); WriteHostBatcher batcher = moveMgr.newWriteHostBatcher().withBatchSize(100).withThreadCount(10) .onBatchSuccess((client, batch) -> logger.info(getSummaryReport(batch))) .onBatchFailure((client, batch, throwable) -> { logger.warn("FAILURE on batch:" + batch.toString() + "\n", throwable); throwable.printStackTrace(); });/*from w ww . j ava2s. c o m*/ ticket = moveMgr.startJob(batcher); try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir.toPath(), "*.csv")) { for (Path entry : stream) { logger.debug("Adding " + entry.getFileName().toString()); MappingIterator<ObjectNode> it = csvMapper.readerFor(ObjectNode.class).with(bootstrapSchema) .readValues(entry.toFile()); long i = 0; while (it.hasNext()) { ObjectNode jsonNode = it.next(); String jsonString = mapper.writeValueAsString(jsonNode); String uri = entry.toUri().toString() + "-" + Long.toString(i++) + ".json"; DocumentMetadataHandle metadata = new DocumentMetadataHandle() // .withCollections("raw", "csv") // .withPermission("race-reader", Capability.READ) // .withPermission("race-writer", Capability.INSERT, Capability.UPDATE); batcher.add(uri, metadata, new StringHandle(jsonString)); if (i % 1000 == 0) logger.debug("Inserting JSON document " + uri); } it.close(); } } catch (IOException e) { e.printStackTrace(); } batcher.flush(); }
From source file:net.flutterflies.fwapaderp.game.TeamManager.java
/** * Converts a .csv spreadsheet template into a UHCTeam object * * @param teamsList Used purely as a reference to an earlier object, overwriting it * @return A list of all UHCTeams/*w w w . j a v a2 s .c o m*/ */ public ArrayList<UHCTeam> createTeamsFromCSV(ArrayList<UHCTeam> teamsList) { ArrayList<String[]> rows = new ArrayList<>(); File teamsFile = new File(plugin.getDataFolder(), "teams.csv"); CsvMapper mapper = new CsvMapper(); //Clear any existing teams on the team list teamsList.clear(); mapper.enable(CsvParser.Feature.WRAP_AS_ARRAY); //Try to load values from teams.csv try { MappingIterator<String[]> iterator = mapper.readerFor(String[].class).readValues(teamsFile); while (iterator.hasNext()) { rows.add(rows.size(), iterator.next()); } } catch (IOException e) { plugin.getLogger().log(Level.SEVERE, "Could not find the file teams.csv! Please either supply" + "a teams.csv file or disable usePreMadeTeams in the plugin's config file."); System.exit(0); } //For each row in the csv file create a new team for (int i = 1; i < rows.size(); i++) { String[] team = rows.get(i); List<String> teamPlayerList = new ArrayList<>(); for (int j = 2; j < team.length; j++) { if (!team[j].equals("")) { teamPlayerList.add(teamPlayerList.size(), team[j]); } } teamsList.add(teamsList.size(), new UHCTeam(team[0], team[1].toUpperCase().replace(' ', '_'), teamPlayerList)); } //Write Teams to a yaml file for (int i = 0; i < teamList.size(); i++) { //Get the team UHCTeam team = teamList.get(i); //Write the team name plugin.getTeamConfig().set("teams.team" + (i + 1) + ".name", team.getTeamName(false)); //Write the team's color plugin.getTeamConfig().set("teams.team" + (i + 1) + ".color", team.getTeamColor()); //Write all the players in the team for (int j = 0; j < team.getTeamSize(); j++) { plugin.getTeamConfig().set("teams.team" + (i + 1) + ".players.player" + (j + 1), team.getPlayers().get(j)); } } plugin.saveTeamsConfig(); return teamsList; }
From source file:nl.esciencecenter.ptk.csv.CSVData.java
public void parseText(String csvText) throws IOException { // Extended CSV ! // Pass I: remove comments including the ending newline! Pattern pat = Pattern.compile("^#.*\n", Pattern.MULTILINE); csvText = pat.matcher(csvText).replaceAll(""); // todo: check how jackson can parse alternative field separators; if (fieldSeparators != null) { // csvText=csvText.replaceAll(",","_"); for (String sep : fieldSeparators) { // lazy replace csvText = csvText.replaceAll(sep, ","); }/* www. j av a 2 s . c om*/ } // Not needed: Pass II: remove empty lines as a result of the // pat=Pattern.compile("\n\n",Pattern.MULTILINE); // newTxt=pat.matcher(newTxt).replaceAll(""); // ObjectMapper mapper=new ObjectMapper(); CsvMapper mapper = new CsvMapper(); mapper.enable(CsvParser.Feature.WRAP_AS_ARRAY); MappingIterator<Object[]> it = mapper.reader(Object[].class).readValues(csvText); if (it.hasNext() == false) { throw new IOException("Empty text or csv text contains no headers!"); } // read header: Object headers[] = it.next(); StringList list = new StringList(); for (int i = 0; i < headers.length; i++) { list.add(headers[i].toString()); } logger.debugPrintf("Headers=%s\n", list.toString("<>")); headerList = list; data = new ArrayList<String[]>(); // check header values. while (it.hasNext()) { Object line[] = it.next(); String row[] = new String[line.length]; for (int j = 0; j < line.length; j++) { Object value = line[j]; if (value != null) { row[j] = value.toString(); } } data.add(row); } logger.debugPrintf("Read %d number of rows\n", data.size()); }
From source file:com.marklogic.client.test.SPARQLManagerTest.java
/**
 * Consumes the given iterator and returns the number of values it yielded.
 * The iterator is exhausted (but not closed) on return.
 */
private int countLines(MappingIterator<?> iter) {
    int count = 0;
    for (; iter.hasNext(); count++) {
        iter.next();
    }
    return count;
}
From source file:com.marklogic.client.test.SPARQLManagerTest.java
@Test public void testInference() throws Exception { gmgr.write("/ontology", new StringHandle(ontology).withMimetype("application/n-triples")); SPARQLQueryDefinition qdef = smgr.newQueryDefinition("SELECT ?s { ?s a <http://example.org/C1> }"); qdef.setIncludeDefaultRulesets(false); StringHandle handle = new StringHandle().withMimetype(SPARQLMimeTypes.SPARQL_CSV); String results = smgr.executeSelect(qdef, handle).get(); assertNull(results);//from w w w .jav a 2 s . c o m qdef.setRulesets(SPARQLRuleset.RANGE); results = smgr.executeSelect(qdef, handle).get(); assertEquals(1, countLines(parseCsv(results))); qdef.setRulesets(SPARQLRuleset.RANGE, SPARQLRuleset.DOMAIN); results = smgr.executeSelect(qdef, handle).get(); MappingIterator<Map<String, String>> csvRows = parseCsv(results); assertTrue(csvRows.hasNext()); Map<String, String> row = csvRows.next(); assertEquals("http://example.org/o1", row.get("s")); assertTrue(csvRows.hasNext()); row = csvRows.next(); assertEquals("http://example.org/s2", row.get("s")); assertFalse(csvRows.hasNext()); gmgr.delete("/ontology"); }
From source file:de.undercouch.bson4jackson.BsonParserTest.java
/** * Tests if a simple BSON file can be read successfully * @throws Exception if something went wrong *///from w w w.java 2 s. c om @Test public void readBSONFile() throws Exception { InputStream is = getClass().getResourceAsStream("test.bson"); try { ObjectMapper mapper = new ObjectMapper(new BsonFactory()); MappingIterator<BSONObject> iterator = mapper.reader(BasicBSONObject.class).readValues(is); BSONObject o = null; while (iterator.hasNext()) { assertNull(o); BSONObject object = iterator.next(); assertNotNull(object); o = object; } assertEquals("Hello world", o.get("message")); assertEquals(10.0, o.get("size")); assertTrue(o.keySet().contains("_id")); assertEquals(3, o.keySet().size()); } finally { is.close(); } }