List of usage examples for org.apache.commons.collections4 IteratorUtils toList
public static <E> List<E> toList(final Iterator<? extends E> iterator)
From source file:org.apache.metron.profiler.spark.function.HBaseWriterFunctionTest.java
@Test public void testWriteNone() throws Exception { // there are no profile measurements to write List<ProfileMeasurementAdapter> measurements = new ArrayList<>(); // setup the function to test HBaseWriterFunction function = new HBaseWriterFunction(profilerProperties); function.withTableProviderImpl(MockHBaseTableProvider.class.getName()); // write the measurements Iterator<Integer> results = function.call(measurements.iterator()); // validate the result List<Integer> counts = IteratorUtils.toList(results); Assert.assertEquals(1, counts.size()); Assert.assertEquals(0, counts.get(0).intValue()); }
From source file:org.apache.streams.storm.trident.StreamsProviderSpout.java
@Override public synchronized void emitBatch(long l, TridentCollector tridentCollector) { List<StreamsDatum> batch; batch = IteratorUtils.toList(provider.readCurrent().iterator()); for (StreamsDatum datum : batch) { tridentCollector//from www .j ava 2s . c o m .emit(Lists.newArrayList(datum.getTimestamp(), datum.getSequenceid(), datum.getDocument())); } }
From source file:org.failearly.dataset.internal.generator.GeneratorTestBase.java
/**
 * Materializes every value produced by the given generator into a list,
 * fully consuming its iterator.
 *
 * @param generator the generator to drain
 * @return all generated values, in generation order
 */
protected List<T> asList(Generator<T> generator) {
    final List<T> values = IteratorUtils.toList(generator.iterator());
    return values;
}
From source file:org.gitia.jdataanalysis.JDataAnalysis.java
private void obtainData() { try {/*from ww w. ja v a2s .c o m*/ CSVFormat csvf; if (this.isHeader) { csvf = CSVFormat.DEFAULT.withHeader(); parser = new CSVParser(new FileReader(path), csvf); datos = IteratorUtils.toList(parser.iterator()); data = new String[datos.size()][datos.get(0).size()]; for (int i = 0; i < datos.size(); i++) { for (int j = 0; j < datos.get(0).size(); j++) { data[i][j] = datos.get(i).get(j); } } } else { csvf = CSVFormat.DEFAULT.withIgnoreHeaderCase(isHeader); CsvMapper mapper = new CsvMapper(); // important: we need "array wrapping" (see next section) here: mapper.enable(CsvParser.Feature.WRAP_AS_ARRAY); File csvFile = new File("src/main/resources/handwrittennumbers/mnist_train_in.csv"); // or from String, URL etc MappingIterator<double[]> it = mapper.readerFor(double[].class).readValues(csvFile); int a = 1; List<double[]> listData = it.readAll(); double[][] data = new double[listData.size()][listData.get(0).length]; for (int i = 0; i < listData.size(); i++) { data[i] = listData.get(i); System.out.println(a++ + ":\t"); } SimpleMatrix A = new SimpleMatrix(data); A.print(); } parser = new CSVParser(new FileReader(path), csvf); datos = IteratorUtils.toList(parser.iterator()); data = new String[datos.size()][datos.get(0).size()]; for (int i = 0; i < datos.size(); i++) { for (int j = 0; j < datos.get(0).size(); j++) { data[i][j] = datos.get(i).get(j); } } } catch (IOException ex) { Logger.getLogger(JDataAnalysis.class.getName()).log(Level.SEVERE, null, ex); } }
From source file:org.ligoj.app.plugin.id.ldap.dao.AbstractContainerLdapRepository.java
@Override public Page<T> findAll(final Set<T> groups, final String criteria, final Pageable pageable, final Map<String, Comparator<T>> customComparators) { // Create the set with the right comparator final List<Sort.Order> orders = IteratorUtils .toList(ObjectUtils.defaultIfNull(pageable.getSort(), new ArrayList<Sort.Order>()).iterator()); orders.add(DEFAULT_ORDER);/*ww w .j av a 2 s . com*/ final Sort.Order order = orders.get(0); Comparator<T> comparator = customComparators.get(order.getProperty()); if (order.getDirection() == Direction.DESC) { comparator = Collections.reverseOrder(comparator); } final Set<T> result = new TreeSet<>(comparator); // Filter the groups, filtering by the criteria addFilteredByPattern(groups, criteria, result); // Apply in-memory pagination return inMemoryPagination.newPage(result, pageable); }
From source file:org.ligoj.app.plugin.id.ldap.dao.UserLdapRepository.java
@Override public Page<UserOrg> findAll(final Collection<GroupOrg> requiredGroups, final Set<String> companies, final String criteria, final Pageable pageable) { // Create the set with the right comparator final List<Sort.Order> orders = IteratorUtils .toList(ObjectUtils.defaultIfNull(pageable.getSort(), new ArrayList<Sort.Order>()).iterator()); orders.add(DEFAULT_ORDER);// w w w. java 2 s . c o m final Sort.Order order = orders.get(0); Comparator<UserOrg> comparator = ObjectUtils.defaultIfNull(COMPARATORS.get(order.getProperty()), DEFAULT_COMPARATOR); if (order.getDirection() == Direction.DESC) { comparator = Collections.reverseOrder(comparator); } final Set<UserOrg> result = new TreeSet<>(comparator); // Filter the users traversing firstly the required groups and their members, // the companies, then the criteria final Map<String, UserOrg> users = findAll(); if (requiredGroups == null) { // No constraint on group addFilteredByCompaniesAndPattern(users.keySet(), companies, criteria, result, users); } else { // User must be within one the given groups for (final GroupOrg requiredGroup : requiredGroups) { addFilteredByCompaniesAndPattern(requiredGroup.getMembers(), companies, criteria, result, users); } } // Apply in-memory pagination return inMemoryPagination.newPage(result, pageable); }
From source file:org.openlmis.fulfillment.Resource2Db.java
Pair<List<String>, List<Object[]>> resourceCsvToBatchedPair(final Resource resource) throws IOException { XLOGGER.entry(resource.getDescription()); // parse CSV/* ww w .java2s . c om*/ try (InputStreamReader isReader = new InputStreamReader( new BOMInputStream(resource.getInputStream(), ByteOrderMark.UTF_8))) { CSVParser parser = CSVFormat.DEFAULT.withHeader().withNullString("").parse(isReader); // read header row MutablePair<List<String>, List<Object[]>> readData = new MutablePair<>(); readData.setLeft(new ArrayList<>(parser.getHeaderMap().keySet())); XLOGGER.info("Read header: " + readData.getLeft()); // read data rows List<Object[]> rows = new ArrayList<>(); for (CSVRecord record : parser.getRecords()) { if (!record.isConsistent()) { throw new IllegalArgumentException("CSV record inconsistent: " + record); } List theRow = IteratorUtils.toList(record.iterator()); rows.add(theRow.toArray()); } readData.setRight(rows); XLOGGER.exit("Records read: " + readData.getRight().size()); return readData; } }
From source file:org.pentaho.di.trans.steps.omniture.OmnitureInput.java
private Object[] prepareRecord(Record record) throws KettleException { // Build an empty row based on the meta-data Object[] outputRowData = buildEmptyRow(); try {/*from w ww. j ava2 s . c om*/ for (int i = 0; i < data.nrfields; i++) { String value = IteratorUtils.toList(record.iterator()) .get(data.headerNames.indexOf(meta.getInputFields()[i].getName())); // do trimming! switch (meta.getInputFields()[i].getTrimType()) { case OmnitureInputField.TYPE_TRIM_LEFT: value = Const.ltrim(value); break; case OmnitureInputField.TYPE_TRIM_RIGHT: value = Const.rtrim(value); break; case OmnitureInputField.TYPE_TRIM_BOTH: value = Const.trim(value); break; default: break; } // do conversions ValueMetaInterface targetValueMeta = data.outputRowMeta.getValueMeta(i); ValueMetaInterface sourceValueMeta = data.convertRowMeta.getValueMeta(i); outputRowData[i] = targetValueMeta.convertData(sourceValueMeta, value); } // End of loop over fields... RowMetaInterface irow = getInputRowMeta(); data.previousRow = irow == null ? outputRowData : irow.cloneRow(outputRowData); // copy it to make } catch (Exception e) { throw new KettleException( BaseMessages.getString(PKG, "OmnitureInput.Exception.CanNotParseFromOmniture"), e); } return outputRowData; }
From source file:org.xwiki.contrib.repository.pypi.internal.dto.pypiJsonApi.PypiPackageJSONDto.java
/**
 * Lists every release version present in this package's metadata.
 *
 * @return the version names, in the order exposed by the releases node
 */
public List<String> getAvailableReleaseVersions() {
    final List<String> versions = IteratorUtils.toList(releases.fieldNames());
    return versions;
}
From source file:therian.TherianModule.java
/**
 * Copies the given iterable into a new array of the specified component type.
 *
 * @param iterable      the source elements
 * @param componentType the runtime component type of the resulting array
 * @return a freshly allocated array containing every element of {@code iterable}
 */
private static <T> T[] toArray(Iterable<T> iterable, Class<?> componentType) {
    // Reuse the collection when one is already available; otherwise drain
    // the iterator into a list.
    final Collection<T> elements = iterable instanceof Collection ? (Collection<T>) iterable
            : IteratorUtils.toList(iterable.iterator());
    @SuppressWarnings("unchecked")
    final T[] target = (T[]) Array.newInstance(componentType, elements.size());
    return elements.toArray(target);
}