Example usage for java.util.stream.Collectors.toMap

List of usage examples for java.util.stream.Collectors.toMap

Introduction

This page lists usage examples for java.util.stream.Collectors.toMap.

Prototype

public static <T, K, U> Collector<T, ?, Map<K, U>> toMap(Function<? super T, ? extends K> keyMapper,
        Function<? super T, ? extends U> valueMapper) 

Document

Returns a Collector that accumulates elements into a Map whose keys and values are the result of applying the provided mapping functions to the input elements.
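As a quick orientation before the project examples, here is a minimal, self-contained sketch of the collector (standard JDK behavior). Note that this two-argument overload throws IllegalStateException on duplicate keys; the three-argument overload accepts a merge function to resolve collisions.

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class ToMapDemo {
    public static void main(String[] args) {
        List<String> words = List.of("apple", "banana", "cherry");

        // keyMapper: the word itself; valueMapper: its length
        Map<String, Integer> lengths = words.stream()
                .collect(Collectors.toMap(w -> w, String::length));
        System.out.println(lengths); // e.g. {banana=6, apple=5, cherry=6}

        // With duplicate keys (banana and cherry both map to 6), supply a
        // merge function via the three-argument overload:
        Map<Integer, String> byLength = words.stream()
                .collect(Collectors.toMap(String::length, w -> w,
                        (first, second) -> first)); // keep the first value on collision
        System.out.println(byLength); // {5=apple, 6=banana}
    }
}

Many of the examples below pass i -> i or Function.identity() as a mapper; the two are interchangeable.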

Usage

From source file:com.github.aptd.simulation.datamodel.CXMLReader.java

/**
 * create the train list
 *
 * @param p_network network component
 * @param p_agents map with agent asl scripts
 * @param p_factory factory
 * @param p_time time object
 * @param p_minfreetimetoclose minimum free time before a door may close
 * @return pair of the train map (unmodifiable) and the door map
 */
private static Pair<Map<String, ITrain<?>>, Map<String, IDoor<?>>> train(final Network p_network,
        final Map<String, String> p_agents, final IFactory p_factory, final ITime p_time,
        final double p_minfreetimetoclose) {
    final String l_dooragent = IStatefulElement.getDefaultAsl("door");
    final Map<String, IElement.IGenerator<ITrain<?>>> l_generators = new ConcurrentHashMap<>();
    final Set<IAction> l_actions = CCommon.actionsFromPackage().collect(Collectors.toSet());
    final IElement.IGenerator<IDoor<?>> l_doorgenerator = doorgenerator(p_factory, l_dooragent, l_actions,
            p_time);
    final Map<String, AtomicLong> l_doorcount = Collections.synchronizedMap(new HashMap<>());
    final Map<String, IDoor<?>> l_doors = Collections.synchronizedMap(new HashMap<>());
    return new ImmutablePair<>(
            Collections.<String, ITrain<?>>unmodifiableMap(
                    p_network.getTimetable().getTrains().getTrain().parallelStream()
                            .filter(i -> hasagentname(i.getAny3())).map(i -> agentname(i, i.getAny3()))
                            .map(i -> l_generators
                                    .computeIfAbsent(i.getRight(),
                                            a -> traingenerator(p_factory, p_agents.get(i.getRight()),
                                                    l_actions, p_time))
                                    .generatesingle(i.getLeft().getId(),
                                            i.getLeft().getTrainPartSequence().stream().flatMap(ref -> {
                                                // @todo support multiple train parts
                                                final EOcpTT[] l_tts = ((ETrainPart) ref.getTrainPartRef()
                                                        .get(0).getRef()).getOcpsTT().getOcpTT()
                                                                .toArray(new EOcpTT[0]);
                                                final CTrain.CTimetableEntry[] l_entries = new CTrain.CTimetableEntry[l_tts.length];
                                                for (int j = 0; j < l_tts.length; j++) {
                                                    final EArrivalDepartureTimes l_times = l_tts[j].getTimes()
                                                            .stream()
                                                            .filter(t -> t.getScope()
                                                                    .equalsIgnoreCase("published"))
                                                            .findAny().orElseThrow(() -> new CSemanticException(
                                                                    "missing published times"));
                                                    l_entries[j] = new CTrain.CTimetableEntry(
                                                            j < 1 ? 0.0
                                                                    : ((ETrack) l_tts[j - 1].getSectionTT()
                                                                            .getTrackRef().get(0).getRef())
                                                                                    .getTrackTopology()
                                                                                    .getTrackEnd().getPos()
                                                                                    .doubleValue(),
                                                            ((EOcp) l_tts[j].getOcpRef()).getId(),
                                                            l_tts[j].getStopDescription().getOtherAttributes()
                                                                    .getOrDefault(PLATFORM_REF_ATTRIBUTE, null),
                                                            l_times.getArrival() == null ? null
                                                                    : l_times.getArrival().toGregorianCalendar()
                                                                            .toZonedDateTime()
                                                                            .with(LocalDate.from(p_time
                                                                                    .current()
                                                                                    .atZone(ZoneId
                                                                                            .systemDefault())))
                                                                            .toInstant(),
                                                            l_times.getDeparture() == null ? null
                                                                    : l_times.getDeparture()
                                                                            .toGregorianCalendar()
                                                                            .toZonedDateTime()
                                                                            .with(LocalDate.from(p_time
                                                                                    .current()
                                                                                    .atZone(ZoneId
                                                                                            .systemDefault())))
                                                                            .toInstant());
                                                }
                                                return Arrays.stream(l_entries);
                                            }), i.getLeft().getTrainPartSequence().stream()
                                                    // @todo support multiple train parts
                                                    .map(s -> (ETrainPart) s.getTrainPartRef().get(0).getRef())
                                                    .map(p -> (EFormation) p.getFormationTT().getFormationRef())
                                                    .flatMap(f -> f.getTrainOrder().getVehicleRef().stream())
                                                    .map(r -> new ImmutablePair<BigInteger, TDoors>(
                                                            r.getVehicleCount(),
                                                            ((EVehicle) r.getVehicleRef()).getWagon()
                                                                    .getPassenger().getDoors()))
                                                    .flatMap(v -> IntStream
                                                            .range(0,
                                                                    v.getLeft().intValue() * v.getRight()
                                                                            .getNumber().intValue())
                                                            .mapToObj(j -> l_doors.computeIfAbsent("door-"
                                                                    + i.getLeft().getId() + "-"
                                                                    + l_doorcount
                                                                            .computeIfAbsent(i.getLeft()
                                                                                    .getId(),
                                                                                    id -> new AtomicLong(1L))
                                                                            .getAndIncrement(),
                                                                    id -> l_doorgenerator.generatesingle(id,
                                                                            i.getLeft().getId(),
                                                                            v.getRight().getEntranceWidth()
                                                                                    .doubleValue()
                                                                                    / v.getRight().getNumber()
                                                                                            .longValue(),
                                                                            p_minfreetimetoclose))))
                                                    .collect(Collectors.toList())))
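                            // collect the generated trains into a map keyed by element id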
                            .collect(Collectors.toMap(IElement::id, i -> i))),
            l_doors);
}

From source file:delfos.rs.trustbased.WeightedGraph.java

private Map<Node, Map<Node, Number>> getSubGraphEdges(Collection<Node> nodes) {
    Map<Node, Map<Node, Number>> edgesOfSubGraph = nodes.parallelStream()
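            // nested toMap: outer map keyed by node, inner map holding that node's weighted edges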
            .collect(Collectors.toMap(node1 -> node1, node1 -> {
                Map<Node, Number> edgesFromThisVertex = nodes.parallelStream()
                        .filter(node2 -> this.connectionWeight(node1, node2).isPresent())
                        .collect(Collectors.toMap(node2 -> node2, node2 -> {
                            return this.connectionWeight(node1, node2).get();
                        }));

                return edgesFromThisVertex;
            }));
    return edgesOfSubGraph;
}

From source file:com.epam.catgenome.manager.vcf.VcfManager.java

private VcfFilterInfo getFiltersInfo(FeatureReader<VariantContext> reader) throws IOException {
    VcfFilterInfo filterInfo = new VcfFilterInfo();

    VCFHeader header = (VCFHeader) reader.getHeader();
    Collection<VCFInfoHeaderLine> headerLines = header.getInfoHeaderLines();
    Map<String, InfoItem> infoItems = headerLines.stream()
            .filter(l -> !isExtendedInfoLine(l.getDescription())) // exclude ANN from fields; we don't need it in the index
            .map(InfoItem::new)
            .collect(Collectors.toMap(InfoItem::getName, i -> i));
    filterInfo.setAvailableFilters(
            header.getFilterLines().stream().map(VCFSimpleHeaderLine::getID).collect(Collectors.toSet()));

    List<String> filtersWhiteList = getFilterWhiteList();
    if (!filtersWhiteList.isEmpty()) {
        infoItems = scourFilterList(infoItems, filtersWhiteList);
    }

    filterInfo.setInfoItemMap(infoItems);

    return filterInfo;
}

From source file:com.netflix.metacat.connector.hive.HiveConnectorPartitionService.java

protected Map<String, PartitionHolder> getPartitionsByNames(final Table table,
        final List<String> partitionNames) {
    final String databasename = table.getDbName();
    final String tablename = table.getTableName();
    try {
        final List<Partition> partitions = metacatHiveClient.getPartitions(databasename, tablename,
                partitionNames);

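        // key each partition holder by its reconstructed partition name (Warehouse.makePartName)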
        return partitions.stream().map(PartitionHolder::new).collect(Collectors.toMap(part -> {
            try {
                return Warehouse.makePartName(table.getPartitionKeys(), part.getPartition().getValues());
            } catch (Exception e) {
                throw new InvalidMetaException("One or more partition names are invalid.", e);
            }
        }, Function.identity()));
    } catch (Exception e) {
        throw new InvalidMetaException("One or more partition names are invalid.", e);
    }
}

From source file:co.rsk.peg.BridgeSerializationUtilsTest.java

@Test
public void serializeLockWhitelist() throws Exception {
    PowerMockito.mockStatic(RLP.class);
    mock_RLP_encodeBigInteger();
    mock_RLP_encodeList();
    mock_RLP_encodeElement();

    byte[][] addressesBytes = new byte[][] { BtcECKey.fromPrivate(BigInteger.valueOf(100)).getPubKeyHash(),
            BtcECKey.fromPrivate(BigInteger.valueOf(200)).getPubKeyHash(),
            BtcECKey.fromPrivate(BigInteger.valueOf(300)).getPubKeyHash(),
            BtcECKey.fromPrivate(BigInteger.valueOf(400)).getPubKeyHash(),
            BtcECKey.fromPrivate(BigInteger.valueOf(500)).getPubKeyHash(),
            BtcECKey.fromPrivate(BigInteger.valueOf(600)).getPubKeyHash(), };
    Coin maxToTransfer = Coin.CENT;

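    // key each whitelisted address to itself and build a one-off entry capped at maxToTransfer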
    LockWhitelist lockWhitelist = new LockWhitelist(Arrays.stream(addressesBytes)
            .map(bytes -> new Address(NetworkParameters.fromID(NetworkParameters.ID_REGTEST), bytes)).collect(
                    Collectors.toMap(Function.identity(), k -> new OneOffWhiteListEntry(k, maxToTransfer))),
            0);

    byte[] result = BridgeSerializationUtils.serializeOneOffLockWhitelist(
            Pair.of(lockWhitelist.getAll(OneOffWhiteListEntry.class), lockWhitelist.getDisableBlockHeight()));
    StringBuilder expectedBuilder = new StringBuilder();
    Arrays.stream(addressesBytes).sorted(UnsignedBytes.lexicographicalComparator()).forEach(bytes -> {
        expectedBuilder.append("dd");
        expectedBuilder.append(Hex.toHexString(bytes));
        expectedBuilder.append("ff");
        expectedBuilder.append(Hex.toHexString(BigInteger.valueOf(maxToTransfer.value).toByteArray()));
    });
    expectedBuilder.append("ff00");
    byte[] expected = Hex.decode(expectedBuilder.toString());
    Assert.assertThat(result, is(expected));
}

From source file:com.uber.hoodie.common.table.view.HoodieTableFileSystemViewTest.java

/**
 * Helper method to test views in the presence of concurrent compaction.
 *
 * @param skipCreatingDataFile if set, the first file-slice will not have a data-file,
 *                             simulating inserts going directly to log files
 * @param isCompactionInFlight if set, compaction was inflight (running) when the view was
 *                             first tested; otherwise compaction was in the requested state
 * @throws Exception
 */
private void testViewForFileSlicesWithAsyncCompaction(boolean skipCreatingDataFile,
        boolean isCompactionInFlight) throws Exception {
    String partitionPath = "2016/05/01";
    new File(basePath + "/" + partitionPath).mkdirs();
    String fileId = UUID.randomUUID().toString();

    // if skipCreatingDataFile, instantTime1 below acts as a delta-commit; otherwise it is a base-commit
    String instantTime1 = "1";
    String deltaInstantTime1 = "2";
    String deltaInstantTime2 = "3";

    String dataFileName = null;
    if (!skipCreatingDataFile) {
        dataFileName = FSUtils.makeDataFileName(instantTime1, 1, fileId);
        new File(basePath + "/" + partitionPath + "/" + dataFileName).createNewFile();
    }
    String fileName1 = FSUtils.makeLogFileName(fileId, HoodieLogFile.DELTA_EXTENSION, instantTime1, 0);
    String fileName2 = FSUtils.makeLogFileName(fileId, HoodieLogFile.DELTA_EXTENSION, instantTime1, 1);
    new File(basePath + "/" + partitionPath + "/" + fileName1).createNewFile();
    new File(basePath + "/" + partitionPath + "/" + fileName2).createNewFile();
    HoodieActiveTimeline commitTimeline = metaClient.getActiveTimeline();
    HoodieInstant instant1 = new HoodieInstant(true, HoodieTimeline.COMMIT_ACTION, instantTime1);
    HoodieInstant deltaInstant2 = new HoodieInstant(true, HoodieTimeline.DELTA_COMMIT_ACTION,
            deltaInstantTime1);
    HoodieInstant deltaInstant3 = new HoodieInstant(true, HoodieTimeline.DELTA_COMMIT_ACTION,
            deltaInstantTime2);

    commitTimeline.saveAsComplete(instant1, Optional.empty());
    commitTimeline.saveAsComplete(deltaInstant2, Optional.empty());
    commitTimeline.saveAsComplete(deltaInstant3, Optional.empty());

    refreshFsView(null);
    List<FileSlice> fileSlices = rtView.getLatestFileSlices(partitionPath).collect(Collectors.toList());
    String compactionRequestedTime = "4";
    String compactDataFileName = FSUtils.makeDataFileName(compactionRequestedTime, 1, fileId);
    List<Pair<String, FileSlice>> partitionFileSlicesPairs = new ArrayList<>();
    partitionFileSlicesPairs.add(Pair.of(partitionPath, fileSlices.get(0)));
    HoodieCompactionPlan compactionPlan = CompactionUtils.buildFromFileSlices(partitionFileSlicesPairs,
            Optional.empty(), Optional.empty());
    HoodieInstant compactionInstant = null;
    if (isCompactionInFlight) {
        // Create a data-file, but the view should skip it since compaction is still inflight
        new File(basePath + "/" + partitionPath + "/" + compactDataFileName).createNewFile();
        compactionInstant = new HoodieInstant(State.INFLIGHT, HoodieTimeline.COMPACTION_ACTION,
                compactionRequestedTime);
        HoodieInstant requested = HoodieTimeline
                .getCompactionRequestedInstant(compactionInstant.getTimestamp());
        commitTimeline.saveToCompactionRequested(requested, AvroUtils.serializeCompactionPlan(compactionPlan));
        commitTimeline.transitionCompactionRequestedToInflight(requested);
    } else {
        compactionInstant = new HoodieInstant(State.REQUESTED, HoodieTimeline.COMPACTION_ACTION,
                compactionRequestedTime);
        commitTimeline.saveToCompactionRequested(compactionInstant,
                AvroUtils.serializeCompactionPlan(compactionPlan));
    }

    // Fake delta-ingestion after compaction-requested
    String deltaInstantTime4 = "5";
    String deltaInstantTime5 = "6";
    List<String> allInstantTimes = Arrays.asList(instantTime1, deltaInstantTime1, deltaInstantTime2,
            compactionRequestedTime, deltaInstantTime4, deltaInstantTime5);
    String fileName3 = FSUtils.makeLogFileName(fileId, HoodieLogFile.DELTA_EXTENSION, compactionRequestedTime,
            0);
    String fileName4 = FSUtils.makeLogFileName(fileId, HoodieLogFile.DELTA_EXTENSION, compactionRequestedTime,
            1);
    new File(basePath + "/" + partitionPath + "/" + fileName3).createNewFile();
    new File(basePath + "/" + partitionPath + "/" + fileName4).createNewFile();
    HoodieInstant deltaInstant4 = new HoodieInstant(true, HoodieTimeline.DELTA_COMMIT_ACTION,
            deltaInstantTime4);
    HoodieInstant deltaInstant5 = new HoodieInstant(true, HoodieTimeline.DELTA_COMMIT_ACTION,
            deltaInstantTime5);
    commitTimeline.saveAsComplete(deltaInstant4, Optional.empty());
    commitTimeline.saveAsComplete(deltaInstant5, Optional.empty());
    refreshFsView(null);

    List<HoodieDataFile> dataFiles = roView.getAllDataFiles(partitionPath).collect(Collectors.toList());
    if (skipCreatingDataFile) {
        assertTrue("No data file expected", dataFiles.isEmpty());
    } else {
        assertEquals("One data-file is expected as there is only one file-group", 1, dataFiles.size());
        assertEquals("Expect only valid data-file", dataFileName, dataFiles.get(0).getFileName());
    }

    /** Merge API Tests **/
    List<FileSlice> fileSliceList = rtView.getLatestMergedFileSlicesBeforeOrOn(partitionPath, deltaInstantTime5)
            .collect(Collectors.toList());
    assertEquals("Expect file-slice to be merged", 1, fileSliceList.size());
    FileSlice fileSlice = fileSliceList.get(0);
    assertEquals(fileId, fileSlice.getFileId());
    if (!skipCreatingDataFile) {
        assertEquals("Data file must be present", dataFileName, fileSlice.getDataFile().get().getFileName());
    } else {
        assertFalse("No data-file expected as it was not created", fileSlice.getDataFile().isPresent());
    }
    assertEquals("Base Instant of penultimate file-slice must be base instant", instantTime1,
            fileSlice.getBaseInstantTime());
    List<HoodieLogFile> logFiles = fileSlice.getLogFiles().collect(Collectors.toList());
    assertEquals("Log files must include those after compaction request", 4, logFiles.size());
    assertEquals("Log File Order check", fileName4, logFiles.get(0).getFileName());
    assertEquals("Log File Order check", fileName3, logFiles.get(1).getFileName());
    assertEquals("Log File Order check", fileName2, logFiles.get(2).getFileName());
    assertEquals("Log File Order check", fileName1, logFiles.get(3).getFileName());

    fileSliceList = rtView.getLatestFileSlicesBeforeOrOn(partitionPath, deltaInstantTime5)
            .collect(Collectors.toList());
    assertEquals("Expect only one file-id", 1, fileSliceList.size());
    fileSlice = fileSliceList.get(0);
    assertEquals(fileId, fileSlice.getFileId());
    assertFalse("No data-file expected in latest file-slice", fileSlice.getDataFile().isPresent());
    assertEquals("Compaction requested instant must be base instant", compactionRequestedTime,
            fileSlice.getBaseInstantTime());
    logFiles = fileSlice.getLogFiles().collect(Collectors.toList());
    assertEquals("Log files must include only those after compaction request", 2, logFiles.size());
    assertEquals("Log File Order check", fileName4, logFiles.get(0).getFileName());
    assertEquals("Log File Order check", fileName3, logFiles.get(1).getFileName());

    /**  Data Files API tests */
    dataFiles = roView.getLatestDataFiles().collect(Collectors.toList());
    if (skipCreatingDataFile) {
        assertEquals("Expect no data file to be returned", 0, dataFiles.size());
    } else {
        assertEquals("Expect only one data-file to be sent", 1, dataFiles.size());
        dataFiles.stream().forEach(df -> {
            assertEquals("Expect data-file for instant 1 be returned", df.getCommitTime(), instantTime1);
        });
    }
    dataFiles = roView.getLatestDataFiles(partitionPath).collect(Collectors.toList());
    if (skipCreatingDataFile) {
        assertEquals("Expect no data file to be returned", 0, dataFiles.size());
    } else {
        assertEquals("Expect only one data-file to be sent", 1, dataFiles.size());
        dataFiles.stream().forEach(df -> {
            assertEquals("Expect data-file for instant 1 be returned", df.getCommitTime(), instantTime1);
        });
    }
    dataFiles = roView.getLatestDataFilesBeforeOrOn(partitionPath, deltaInstantTime5)
            .collect(Collectors.toList());
    if (skipCreatingDataFile) {
        assertEquals("Expect no data file to be returned", 0, dataFiles.size());
    } else {
        assertEquals("Expect only one data-file to be sent", 1, dataFiles.size());
        dataFiles.stream().forEach(df -> {
            assertEquals("Expect data-file for instant 1 be returned", df.getCommitTime(), instantTime1);
        });
    }
    dataFiles = roView.getLatestDataFilesInRange(allInstantTimes).collect(Collectors.toList());
    if (skipCreatingDataFile) {
        assertEquals("Expect no data file to be returned", 0, dataFiles.size());
    } else {
        assertEquals("Expect only one data-file to be sent", 1, dataFiles.size());
        dataFiles.stream().forEach(df -> {
            assertEquals("Expect data-file for instant 1 be returned", df.getCommitTime(), instantTime1);
        });
    }

    /** Inflight/Orphan file-groups need to be in the view **/

    // There is a data-file with this inflight file-id
    final String inflightFileId1 = UUID.randomUUID().toString();
    // There is a log-file with this inflight file-id
    final String inflightFileId2 = UUID.randomUUID().toString();
    // There is an orphan data file with this file-id
    final String orphanFileId1 = UUID.randomUUID().toString();
    // There is an orphan log data file with this file-id
    final String orphanFileId2 = UUID.randomUUID().toString();
    final String invalidInstantId = "INVALIDTIME";
    String inflightDeltaInstantTime = "7";
    String orphanDataFileName = FSUtils.makeDataFileName(invalidInstantId, 1, orphanFileId1);
    new File(basePath + "/" + partitionPath + "/" + orphanDataFileName).createNewFile();
    String orphanLogFileName = FSUtils.makeLogFileName(orphanFileId2, HoodieLogFile.DELTA_EXTENSION,
            invalidInstantId, 0);
    new File(basePath + "/" + partitionPath + "/" + orphanLogFileName).createNewFile();
    String inflightDataFileName = FSUtils.makeDataFileName(inflightDeltaInstantTime, 1, inflightFileId1);
    new File(basePath + "/" + partitionPath + "/" + inflightDataFileName).createNewFile();
    String inflightLogFileName = FSUtils.makeLogFileName(inflightFileId2, HoodieLogFile.DELTA_EXTENSION,
            inflightDeltaInstantTime, 0);
    new File(basePath + "/" + partitionPath + "/" + inflightLogFileName).createNewFile();
    // Mark instant as inflight
    commitTimeline.saveToInflight(
            new HoodieInstant(State.INFLIGHT, HoodieTimeline.DELTA_COMMIT_ACTION, inflightDeltaInstantTime),
            Optional.empty());
    refreshFsView(null);

    List<FileSlice> allRawFileSlices = getAllRawFileSlices(partitionPath).collect(Collectors.toList());
    dataFiles = allRawFileSlices.stream().flatMap(slice -> {
        if (slice.getDataFile().isPresent()) {
            return Stream.of(slice.getDataFile().get());
        }
        return Stream.empty();
    }).collect(Collectors.toList());
    assertEquals("Inflight/Orphan data-file is also expected",
            2 + (isCompactionInFlight ? 1 : 0) + (skipCreatingDataFile ? 0 : 1), dataFiles.size());
    Set<String> fileNames = dataFiles.stream().map(HoodieDataFile::getFileName).collect(Collectors.toSet());
    assertTrue("Expect orphan data-file to be present", fileNames.contains(orphanDataFileName));
    assertTrue("Expect inflight data-file to be present", fileNames.contains(inflightDataFileName));
    if (!skipCreatingDataFile) {
        assertTrue("Expect old committed data-file", fileNames.contains(dataFileName));
    }

    if (isCompactionInFlight) {
        assertTrue("Expect inflight compacted data file to be present",
                fileNames.contains(compactDataFileName));
    }

    fileSliceList = getLatestRawFileSlices(partitionPath).collect(Collectors.toList());
    assertEquals("Expect both inflight and orphan file-slice to be included", 5, fileSliceList.size());
    Map<String, FileSlice> fileSliceMap = fileSliceList.stream()
            .collect(Collectors.toMap(FileSlice::getFileId, r -> r));
    FileSlice orphanFileSliceWithDataFile = fileSliceMap.get(orphanFileId1);
    FileSlice orphanFileSliceWithLogFile = fileSliceMap.get(orphanFileId2);
    FileSlice inflightFileSliceWithDataFile = fileSliceMap.get(inflightFileId1);
    FileSlice inflightFileSliceWithLogFile = fileSliceMap.get(inflightFileId2);

    assertEquals("Orphan File Slice with data-file check base-commit", invalidInstantId,
            orphanFileSliceWithDataFile.getBaseInstantTime());
    assertEquals("Orphan File Slice with data-file check data-file", orphanDataFileName,
            orphanFileSliceWithDataFile.getDataFile().get().getFileName());
    assertEquals("Orphan File Slice with data-file check data-file", 0,
            orphanFileSliceWithDataFile.getLogFiles().count());
    assertEquals("Inflight File Slice with data-file check base-commit", inflightDeltaInstantTime,
            inflightFileSliceWithDataFile.getBaseInstantTime());
    assertEquals("Inflight File Slice with data-file check data-file", inflightDataFileName,
            inflightFileSliceWithDataFile.getDataFile().get().getFileName());
    assertEquals("Inflight File Slice with data-file check data-file", 0,
            inflightFileSliceWithDataFile.getLogFiles().count());
    assertEquals("Orphan File Slice with log-file check base-commit", invalidInstantId,
            orphanFileSliceWithLogFile.getBaseInstantTime());
    assertFalse("Orphan File Slice with log-file check data-file",
            orphanFileSliceWithLogFile.getDataFile().isPresent());
    logFiles = orphanFileSliceWithLogFile.getLogFiles().collect(Collectors.toList());
    assertEquals("Orphan File Slice with log-file check data-file", 1, logFiles.size());
    assertEquals("Orphan File Slice with log-file check data-file", orphanLogFileName,
            logFiles.get(0).getFileName());
    assertEquals("Inflight File Slice with log-file check base-commit", inflightDeltaInstantTime,
            inflightFileSliceWithLogFile.getBaseInstantTime());
    assertFalse("Inflight File Slice with log-file check data-file",
            inflightFileSliceWithLogFile.getDataFile().isPresent());
    logFiles = inflightFileSliceWithLogFile.getLogFiles().collect(Collectors.toList());
    assertEquals("Inflight File Slice with log-file check data-file", 1, logFiles.size());
    assertEquals("Inflight File Slice with log-file check data-file", inflightLogFileName,
            logFiles.get(0).getFileName());

    // Now simulate compaction completing and check the view
    if (!isCompactionInFlight) {
        // For inflight compaction, we already created a data-file to test the concurrent inflight case.
        // If we skipped creating the data-file corresponding to the compaction commit, create it now.
        new File(basePath + "/" + partitionPath + "/" + compactDataFileName).createNewFile();
    }
    if (isCompactionInFlight) {
        commitTimeline.deleteInflight(compactionInstant);
    } else {
        commitTimeline.deleteCompactionRequested(compactionInstant);
    }
    compactionInstant = new HoodieInstant(State.INFLIGHT, HoodieTimeline.COMPACTION_ACTION,
            compactionRequestedTime);
    commitTimeline.saveAsComplete(compactionInstant, Optional.empty());
    refreshFsView(null);
    // populate the cache
    roView.getAllDataFiles(partitionPath);

    fileSliceList = rtView.getLatestFileSlices(partitionPath).collect(Collectors.toList());
    dataFiles = fileSliceList.stream().map(FileSlice::getDataFile).filter(Optional::isPresent)
            .map(Optional::get).collect(Collectors.toList());
    System.out.println("fileSliceList : " + fileSliceList);
    assertEquals("Expect only one data-files in latest view as there is only one file-group", 1,
            dataFiles.size());
    assertEquals("Data Filename must match", compactDataFileName, dataFiles.get(0).getFileName());
    assertEquals("Only one latest file-slice in the partition", 1, fileSliceList.size());
    fileSlice = fileSliceList.get(0);
    assertEquals("Check file-Id is set correctly", fileId, fileSlice.getFileId());
    assertEquals("Check data-filename is set correctly", compactDataFileName,
            fileSlice.getDataFile().get().getFileName());
    assertEquals("Ensure base-instant is now compaction request instant", compactionRequestedTime,
            fileSlice.getBaseInstantTime());
    logFiles = fileSlice.getLogFiles().collect(Collectors.toList());
    assertEquals("Only log-files after compaction request shows up", 2, logFiles.size());
    assertEquals("Log File Order check", fileName4, logFiles.get(0).getFileName());
    assertEquals("Log File Order check", fileName3, logFiles.get(1).getFileName());

    /**  Data Files API tests */
    dataFiles = roView.getLatestDataFiles().collect(Collectors.toList());
    assertEquals("Expect only one data-file to be sent", 1, dataFiles.size());
    dataFiles.stream().forEach(df -> {
        assertEquals("Expect data-file created by compaction be returned", df.getCommitTime(),
                compactionRequestedTime);
    });
    dataFiles = roView.getLatestDataFiles(partitionPath).collect(Collectors.toList());
    assertEquals("Expect only one data-file to be sent", 1, dataFiles.size());
    dataFiles.stream().forEach(df -> {
        assertEquals("Expect data-file created by compaction be returned", df.getCommitTime(),
                compactionRequestedTime);
    });
    dataFiles = roView.getLatestDataFilesBeforeOrOn(partitionPath, deltaInstantTime5)
            .collect(Collectors.toList());
    assertEquals("Expect only one data-file to be sent", 1, dataFiles.size());
    dataFiles.stream().forEach(df -> {
        assertEquals("Expect data-file created by compaction be returned", df.getCommitTime(),
                compactionRequestedTime);
    });
    dataFiles = roView.getLatestDataFilesInRange(allInstantTimes).collect(Collectors.toList());
    assertEquals("Expect only one data-file to be sent", 1, dataFiles.size());
    dataFiles.stream().forEach(df -> {
        assertEquals("Expect data-file created by compaction be returned", df.getCommitTime(),
                compactionRequestedTime);
    });
}

From source file:io.pravega.controller.store.stream.InMemoryStream.java

@Override
CompletableFuture<Map<String, Data<Integer>>> getCurrentTxns() {
    synchronized (txnsLock) {
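        // copy each value (defensive copy) so callers cannot mutate the stream's internal state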
        Map<String, Data<Integer>> map = activeTxns.entrySet().stream()
                .collect(Collectors.toMap(Map.Entry::getKey, x -> copy(x.getValue())));
        return CompletableFuture.completedFuture(Collections.unmodifiableMap(map));
    }
}

From source file:com.thinkbiganalytics.nifi.feedmgr.TemplateCreationHelper.java

private List<ProcessorDTO> reassignControllerServiceIds(List<ProcessorDTO> processors,
        TemplateInstance instance) {

    Set<ProcessorDTO> updatedProcessors = new HashSet<>();
    if (processors != null) {
        processors.stream().forEach(processorDTO -> {
            Map<String, String> updatedProcessorProperties = new HashMap<>();
            processorDTO.getConfig().getDescriptors().forEach((k, v) -> {
                if (v.getIdentifiesControllerService() != null) {

                    boolean idsMatch = getMergedControllerServices().keySet().stream().anyMatch(
                            id -> id.equalsIgnoreCase(processorDTO.getConfig().getProperties().get(k)));
                    if (!idsMatch && templateProperties != null && !templateProperties.isEmpty()) {

                        NifiProperty matchingProperty = templateProperties.stream()
                                .filter(p -> p.getKey().equalsIgnoreCase(k)
                                        && p.getProcessorName().equalsIgnoreCase(processorDTO.getName())
                                        && v.getIdentifiesControllerService().equalsIgnoreCase(
                                                p.getPropertyDescriptor().getIdentifiesControllerService()))
                                .findFirst().orElse(null);
                        if (matchingProperty != null && matchingProperty.getPropertyDescriptor() != null
                                && matchingProperty.getPropertyDescriptor().getAllowableValues() != null) {
                            NiFiAllowableValue matchingValue = matchingProperty.getPropertyDescriptor()
                                    .getAllowableValues().stream()
                                    .filter(niFiAllowableValue -> niFiAllowableValue.getValue()
                                            .equalsIgnoreCase(matchingProperty.getValue()))
                                    .findFirst().orElse(null);
                            if (matchingValue != null) {
                                String name = matchingValue.getDisplayName();
                                String validControllerServiceId = hasMatchingService(
                                        enabledServiceNameMap, name)
                                                ? enabledServiceNameMap.get(name).get(0).getId()
                                                : hasMatchingService(serviceNameMap, name)
                                                        ? serviceNameMap.get(name).get(0).getId()
                                                        : null;

                                if (StringUtils.isNotBlank(validControllerServiceId)) {
                                    processorDTO.getConfig().getProperties().put(k, validControllerServiceId);
                                    updatedProcessorProperties.put(k, validControllerServiceId);
                                    if (!updatedProcessors.contains(processorDTO)) {
                                        updatedProcessors.add(processorDTO);
                                    }
                                }
                            }
                        }
                    }
                    // if we haven't made a match, check whether the controller service was removed
                    if (!updatedProcessorProperties.containsKey(k) && !idsMatch && instance != null) {
                        String value = processorDTO.getConfig().getProperties().get(k);
                        // find the controller service reference that was removed in favor of a matching service
                        ControllerServiceDTO controllerServiceDTO = instance
                                .findMatchingControllerServoce(value);
                        if (controllerServiceDTO != null) {
                            updatedProcessorProperties.put(k, controllerServiceDTO.getId());
                        }
                    }
                }

            });
            if (!updatedProcessorProperties.isEmpty()) {
                ProcessorDTO updatedProcessor = new ProcessorDTO();
                updatedProcessor.setId(processorDTO.getId());
                updatedProcessor.setConfig(new ProcessorConfigDTO());
                updatedProcessor.getConfig().setProperties(updatedProcessorProperties);
                //update the processor

                ProcessorDTO updated = restClient.updateProcessor(updatedProcessor);
                updatedProcessors.add(updated);
            }

        });
    }
    //update the data back in the processors list
    if (!updatedProcessors.isEmpty()) {
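        // index the updated processors by id so they can replace their originals below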
        Map<String, ProcessorDTO> updatedMap = updatedProcessors.stream()
                .collect(Collectors.toMap(p -> p.getId(), p -> p));
        return processors.stream().map(p -> updatedMap.containsKey(p.getId()) ? updatedMap.get(p.getId()) : p)
                .collect(Collectors.toList());
    }

    return processors;

}

From source file:com.epam.catgenome.manager.bed.BedManager.java

public BedFile reindexBedFile(long bedFileId) throws FeatureIndexException {
    BedFile bedFile = bedFileManager.loadBedFile(bedFileId);
    Reference reference = referenceGenomeManager.loadReferenceGenome(bedFile.getReferenceId());
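    // index the reference's chromosomes by name for the BED feature indexer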
    Map<String, Chromosome> chromosomeMap = reference.getChromosomes().stream()
            .collect(Collectors.toMap(BaseEntity::getName, chromosome -> chromosome));

    try {
        fileManager.deleteFileFeatureIndex(bedFile);
        try (AbstractFeatureReader<BEDFeature, LineIterator> reader = AbstractFeatureReader
                .getFeatureReader(bedFile.getPath(), new BEDCodec(), false)) {
            featureIndexManager.makeIndexForBedReader(bedFile, reader, chromosomeMap);
        }
    } catch (IOException e) {
        throw new FeatureIndexException(bedFile, e);
    }

    return bedFile;
}

From source file:com.github.zhanhb.ckfinder.connector.support.XmlConfigurationParser.java

/**
 * Sets the plugin list from the XML configuration file.
 *
 * @param builder context builder
 * @param childNode child of the XML node 'plugins'.
 * @param resourceLoader resource loader to load xml configuration and
 * watermark resource
 */
private void setPlugins(DefaultCKFinderContext.Builder builder, Node childNode, ResourceLoader resourceLoader) {
    NodeList nodeList = childNode.getChildNodes();
    int length = nodeList.getLength();
    List<Plugin> plugins = new ArrayList<>(length);
    for (int i = 0; i < length; i++) {
        Node childChildNode = nodeList.item(i);
        if ("plugin".equals(childChildNode.getNodeName())) {
            PluginInfo pluginInfo = createPluginFromNode(childChildNode);
            String name = pluginInfo.getName();
            if (name != null) {
                Plugin plugin;
                switch (name) {
                case "imageresize":
                    try {
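                        // map raw param names to typed keys; an unknown name throws
                        // IllegalArgumentException and triggers the defaults fallback below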
                        plugin = new ImageResizePlugin(pluginInfo.getParams().entrySet().stream()
                                .collect(Collectors.toMap(entry -> ImageResizeParam.valueOf(entry.getKey()),
                                        entry -> new ImageResizeSize(entry.getValue()))));
                    } catch (IllegalArgumentException ex) {
                        plugin = new ImageResizePlugin(ImageResizeParam.createDefaultParams());
                    }
                    break;
                case "watermark":
                    WatermarkSettings watermarkSettings = parseWatermarkSettings(pluginInfo, resourceLoader);
                    plugin = new WatermarkPlugin(watermarkSettings);
                    break;
                case "fileeditor":
                    plugin = new FileEditorPlugin();
                    break;
                default:
                    continue;
                }
                plugins.add(plugin);
            }
        }
    }
    builder.eventsFromPlugins(plugins);
}