Example usage for com.google.common.collect HashBiMap create

List of usage examples for com.google.common.collect HashBiMap create

Introduction

On this page you can find example usages of com.google.common.collect.HashBiMap.create.

Prototype

public static <K, V> HashBiMap<K, V> create() 

Source Link

Document

Returns a new, empty HashBiMap with the default initial capacity (16).

Usage

From source file:com.linkedin.pinot.core.startree.OffHeapStarTreeBuilder.java

/**
 * Initializes the star-tree builder from the given configuration: caches the
 * schema, reads dimension and metric columns, builds per-dimension dictionaries,
 * prepares the output data file and creates the root index node.
 *
 * @param builderConfig configuration holding the schema, split order, thresholds
 *                      and (optionally) the output directory
 * @throws Exception if the output data file cannot be created
 */
public void init(StarTreeBuilderConfig builderConfig) throws Exception {
    schema = builderConfig.schema;
    timeColumnName = schema.getTimeColumnName();
    this.dimensionsSplitOrder = builderConfig.dimensionsSplitOrder;
    skipStarNodeCreationForDimensions = builderConfig.getSkipStarNodeCreationForDimensions();
    skipMaterializationForDimensions = builderConfig.getSkipMaterializationForDimensions();
    skipMaterializationCardinalityThreshold = builderConfig.getSkipMaterializationCardinalityThreshold();
    enableOffHeapFormat = builderConfig.isEnableOffHealpFormat();

    this.maxLeafRecords = builderConfig.maxLeafRecords;
    this.outDir = builderConfig.getOutDir();
    if (outDir == null) {
        // Fall back to a unique temp directory when no output dir is configured.
        outDir = new File(System.getProperty("java.io.tmpdir"),
                V1Constants.STAR_TREE_INDEX_DIR + "_" + DateTime.now());
    }
    LOG.info("Index output directory:{}", outDir);

    dimensionTypes = new ArrayList<>();
    dimensionNames = new ArrayList<>();
    dimensionNameToIndexMap = HashBiMap.create();
    dimensionNameToStarValueMap = new HashMap<>();
    dictionaryMap = new HashMap<>();

    // READ DIMENSIONS COLUMNS
    List<DimensionFieldSpec> dimensionFieldSpecs = schema.getDimensionFieldSpecs();
    for (int index = 0; index < dimensionFieldSpecs.size(); index++) {
        DimensionFieldSpec spec = dimensionFieldSpecs.get(index);
        String dimensionName = spec.getName();
        dimensionNames.add(dimensionName);
        dimensionNameToIndexMap.put(dimensionName, index);
        Object starValue = getAllStarValue(spec);
        dimensionNameToStarValueMap.put(dimensionName, starValue);
        dimensionTypes.add(spec.getDataType());
        HashBiMap<Object, Integer> dictionary = HashBiMap.create();
        dictionaryMap.put(dimensionName, dictionary);
    }
    // Treat the time column as just another dimension; the only difference is
    // that we never split on it unless explicitly specified in the split order.
    if (timeColumnName != null) {
        dimensionNames.add(timeColumnName);
        TimeFieldSpec timeFieldSpec = schema.getTimeFieldSpec();
        dimensionTypes.add(timeFieldSpec.getDataType());
        int index = dimensionNameToIndexMap.size();
        dimensionNameToIndexMap.put(timeColumnName, index);
        Object starValue = getAllStarValue(timeFieldSpec);
        dimensionNameToStarValueMap.put(timeColumnName, starValue);
        HashBiMap<Object, Integer> dictionary = HashBiMap.create();
        // Consistency fix: key the dictionary by the cached timeColumnName, the
        // same name used in dimensionNames/dimensionNameToIndexMap above.
        dictionaryMap.put(timeColumnName, dictionary);
    }
    // Each dimension value is stored as a 4-byte dictionary id.
    dimensionSizeBytes = dimensionNames.size() * Integer.SIZE / 8;
    this.numDimensions = dimensionNames.size();

    // READ METRIC COLUMNS
    this.metricNames = new ArrayList<>();

    this.metricNameToIndexMap = new HashMap<>();
    this.metricSizeBytes = 0;
    List<MetricFieldSpec> metricFieldSpecs = schema.getMetricFieldSpecs();
    for (int index = 0; index < metricFieldSpecs.size(); index++) {
        MetricFieldSpec spec = metricFieldSpecs.get(index);
        String metricName = spec.getName();
        metricNames.add(metricName);
        metricNameToIndexMap.put(metricName, index);
        metricSizeBytes += spec.getFieldSize();
    }
    numMetrics = metricNames.size();
    // Bug fix: create the directory that is actually used for output. The
    // configured directory may be null (we fell back to a temp dir above),
    // which previously caused an NPE here, and the fallback dir was never created.
    outDir.mkdirs();
    dataFile = new File(outDir, "star-tree.buf");
    LOG.info("StarTree output data file: {}", dataFile.getAbsolutePath());
    dataBuffer = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(dataFile)));

    // INITIALIZE THE ROOT NODE
    this.starTreeRootIndexNode = new StarTreeIndexNode();
    this.starTreeRootIndexNode.setDimensionName(StarTreeIndexNodeInterf.ALL);
    this.starTreeRootIndexNode.setDimensionValue(StarTreeIndexNodeInterf.ALL);
    this.starTreeRootIndexNode.setLevel(0);
    LOG.info("dimensionNames:{}", dimensionNames);
    LOG.info("metricNames:{}", metricNames);
}

From source file:controller.FXScatterPlot.java

/**
 * Repaints the feature scatter chart on a background task: computes the
 * features off the FX thread, then (on the FX thread) rebuilds one series per
 * sleep stage from the t-SNE coordinates, records each plotted node in
 * {@code plotItemsMap} (node -> epoch index) and wires mouse handlers for
 * click-to-navigate and hover highlighting. {@code isPainted} is false while
 * the repaint is in progress.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public void paintScatterChart() {
    isPainted = false; // cleared until the FX thread finishes rebuilding the chart

    Task<Void> task = new Task<Void>() {

        @Override
        protected Void call() throws Exception {
            // UI mutations must run on the FX application thread.
            Platform.runLater(new Runnable() {

                @Override
                public void run() {
                    progressIndicator.setVisible(true);
                }
            });

            // Heavy computation runs here, on the background task thread.
            computeFeatures();

            Platform.runLater(new Runnable() {
                @Override
                public void run() {
                    scatterChart.getData().clear();

                    // output[i] = 2-D t-SNE coordinates of epoch i.
                    final double[][] output = featureModel.getTsneFeatures();

                    // One series per sleep stage so each stage gets its own
                    // color and legend entry.
                    seriesRem = new XYChart.Series();
                    seriesRem.setName("REM");

                    seriesN1 = new XYChart.Series();
                    seriesN1.setName("N1");

                    seriesN2 = new XYChart.Series();
                    seriesN2.setName("N2");

                    seriesN3 = new XYChart.Series();
                    seriesN3.setName("N3");

                    seriesWake = new XYChart.Series();
                    seriesWake.setName("Awake");

                    seriesUnclassified = new XYChart.Series();
                    seriesUnclassified.setName("Unclassified");

                    scatterChart.getData().add(seriesRem);
                    scatterChart.getData().add(seriesN1);
                    scatterChart.getData().add(seriesN2);
                    scatterChart.getData().add(seriesN3);
                    scatterChart.getData().add(seriesWake);
                    scatterChart.getData().add(seriesUnclassified);

                    // Maps a rendered chart node back to its epoch index.
                    plotItemsMap = HashBiMap.create();

                    // Label -> series mapping per the cases below:
                    // 0=Awake, 1=N1, 2=N2, 3=N3, 5=REM, -1=Unclassified.
                    // Note: labels not listed (e.g. 4) are silently skipped.
                    for (int i = 0; i < featureModel.getNumberOfEpochs(); i++) {

                        XYChart.Data dataItem;
                        switch (featureModel.getLabel(i)) {
                        case 0:
                            dataItem = new XYChart.Data(output[i][0], output[i][1]);
                            seriesWake.getData().add(dataItem);
                            plotItemsMap.put(dataItem.getNode(), i);
                            break;
                        case 1:
                            dataItem = new XYChart.Data(output[i][0], output[i][1]);
                            seriesN1.getData().add(dataItem);
                            plotItemsMap.put(dataItem.getNode(), i);
                            break;
                        case 2:
                            dataItem = new XYChart.Data(output[i][0], output[i][1]);
                            seriesN2.getData().add(dataItem);
                            plotItemsMap.put(dataItem.getNode(), i);
                            break;
                        case 3:
                            dataItem = new XYChart.Data(output[i][0], output[i][1]);
                            seriesN3.getData().add(dataItem);
                            plotItemsMap.put(dataItem.getNode(), i);
                            break;
                        case 5:
                            dataItem = new XYChart.Data(output[i][0], output[i][1]);
                            seriesRem.getData().add(dataItem);
                            plotItemsMap.put(dataItem.getNode(), i);
                            break;
                        case -1:
                            dataItem = new XYChart.Data(output[i][0], output[i][1]);
                            seriesUnclassified.getData().add(dataItem);
                            plotItemsMap.put(dataItem.getNode(), i);
                            break;
                        }

                    }

                    //this is a hack, because JavaFX won't update the legend otherwise
                    scatterChart.getData().add(new XYChart.Series());
                    scatterChart.getData().remove(6);

                    // Maps a rendered chart node to its XYChart.Data item.
                    nodeToXYDataMap = new HashMap();

                    for (XYChart.Series<Number, Number> series : scatterChart.getData()) {
                        for (XYChart.Data<Number, Number> d : series.getData()) {
                            nodeToXYDataMap.put(d.getNode(), d);

                            d.getNode().setOpacity(opacity);
                            // Remember the default blend mode so hover can restore it.
                            bm = d.getNode().blendModeProperty().get();

                            // Click: jump to the epoch this point represents.
                            d.getNode().setOnMouseClicked(new EventHandler<MouseEvent>() {

                                @Override
                                public void handle(MouseEvent event) {
                                    appController.goToEpoch(plotItemsMap.get(d.getNode()));
                                    d.getNode().toBack();
                                }
                            });

                            // Hover in: highlight the point fully opaque.
                            d.getNode().setOnMouseEntered(new EventHandler<MouseEvent>() {

                                @Override
                                public void handle(MouseEvent arg0) {
                                    d.getNode().setEffect(ds);
                                    d.getNode().setCursor(Cursor.HAND);
                                    d.getNode().setOpacity(1.);
                                    d.getNode().blendModeProperty().set(BlendMode.SRC_OVER);
                                }
                            });

                            // Hover out: restore the default look.
                            d.getNode().setOnMouseExited(new EventHandler<MouseEvent>() {

                                @Override
                                public void handle(MouseEvent arg0) {
                                    d.getNode().setEffect(null);
                                    d.getNode().setCursor(Cursor.DEFAULT);
                                    d.getNode().setOpacity(opacity);
                                    d.getNode().blendModeProperty().set(bm);
                                }
                            });
                        }
                    }

                    scatterChart.requestFocus();
                    isPainted = true;
                }

            });

            // Hide the busy indicator and refresh dependent windows.
            Platform.runLater(new Runnable() {

                @Override
                public void run() {
                    progressIndicator.setVisible(false);
                    appController.updateWindows();
                }
            });

            return null;
        }

    };

    Thread thread = new Thread(task);
    thread.setDaemon(true);
    thread.start();
}

From source file:co.turnus.dataflow.impl.NetworkImpl.java

/**
 * <!-- begin-user-doc --> Creates the network and installs the adapter that
 * keeps the actor maps in sync with model changes. <!-- end-user-doc -->
 *
 * @generated NOT
 */
protected NetworkImpl() {
    super();

    // Bidirectional lookup tables for the network's actors and actor classes.
    actorClassesMaps = HashBiMap.create();
    actorsMap = HashBiMap.create();

    // The adapter observes this network and maintains both maps.
    eAdapters().add(new NetworkAdapter(actorClassesMaps, actorsMap));
}

From source file:org.granitemc.granite.GraniteStartupThread.java

/**
 * Populates the {@code BlockTypes} registry via reflection: replaces its static
 * {@code nameMap} and {@code idMap} with fresh BiMaps, then walks Minecraft's
 * block registry to fill them and to assign each {@code BlockTypes} static field.
 */
public void loadBlocks() {
    Class<?> blockClass = Mappings.getClass("Block");

    try { // reflection below can fail if the obfuscation mappings change
        // NOTE(review): despite the name, this reads the "blockWithMetadata"
        // field; its toString() is parsed for the block name below — confirm.
        Field nameField = Mappings.getField(blockClass, "blockWithMetadata");
        nameField.setAccessible(true);

        // Install a fresh name -> BlockType map into BlockTypes.nameMap.
        Field nameMapField = BlockTypes.class.getDeclaredField("nameMap");
        nameMapField.setAccessible(true);
        BiMap<String, BlockType> nameMap = HashBiMap.create();
        nameMapField.set(null, nameMap);

        // Install a fresh id -> BlockType map into BlockTypes.idMap.
        Field idMapField = BlockTypes.class.getDeclaredField("idMap");
        idMapField.setAccessible(true);
        BiMap<Integer, BlockType> idMap = HashBiMap.create();
        idMapField.set(null, idMap);

        for (Object block : (Iterable) Mappings.getField(blockClass, "blockRegistry").get(null)) {
            // Full name looks like "namespace:name[...]"; keep only the bare name.
            String fullName = nameField.get(block).toString();
            String name = fullName.split(":")[1].split("\\[")[0].split(",")[0];

            Object metadata = Mappings.getField(blockClass, "blockMetadata").get(block);
            Collection variants = (Collection) Mappings.getField(metadata.getClass(), "variants").get(metadata);

            // Wrap the first variant as the representative type for this block.
            GraniteBlockType type = (GraniteBlockType) MinecraftUtils.wrap(variants.iterator().next());
            int id = (int) Mappings.invokeStatic("Block", "getIdFromBlock", block);
            nameMap.put(name, type);
            idMap.put(id, type);

            // Also assign the matching static field on BlockTypes, e.g. BlockTypes.stone.
            BlockTypes.class.getDeclaredField(name).set(null, type);
        }
    } catch (IllegalAccessException | NoSuchFieldException e) {
        // NOTE(review): failures are only printed, leaving the registry partly
        // filled — consider propagating or logging via the startup logger.
        e.printStackTrace();
    }
}

From source file:org.prorefactor.refactor.PUB.java

/**
 * Resets all cached lists and tables so this PUB object can be reused for
 * reloading or rebuilding.
 */
private void _refresh() {
    fileList = new ArrayList<String>();
    fileIndexes = new IntegerIndex<String>();
    exportList = new ArrayList<SymbolRef>();
    importList = new ArrayList<SymbolRef>();
    tableMap = new TreeMap<String, TableRef>();
    stringTable = HashBiMap.create();
    // Reserve string index zero: JPNode.attrGet() returns 0 to signal
    // "no string value present", so index 0 must never name a real string.
    stringIndex("");
}

From source file:org.jetbrains.jet.lang.resolve.OverridingUtil.java

/**
 * Checks whether {@code subDescriptor} is compatible with {@code superDescriptor}.
 *
 * @param superDescriptor the member in the supertype
 * @param subDescriptor   the candidate member in the subtype
 * @param forOverride     true for override checking, false for overload checking
 * @return a compatibility result: success, or a specific mismatch/conflict
 */
static OverrideCompatibilityInfo isOverridableByImpl(@NotNull CallableDescriptor superDescriptor,
        @NotNull CallableDescriptor subDescriptor, boolean forOverride) {

    // TODO : Visibility

    // Value-parameter counts must match for both override and overload checks.
    if (compiledValueParameterCount(superDescriptor) != compiledValueParameterCount(subDescriptor)) {
        return OverrideCompatibilityInfo.valueParameterNumberMismatch();
    }

    List<JetType> superValueParameters = compiledValueParameters(superDescriptor);
    List<JetType> subValueParameters = compiledValueParameters(subDescriptor);

    // NOTE(review): when the type-parameter counts differ, this block compares
    // value-parameter upper bounds and reports either a type-parameter count
    // mismatch or a CONFLICT — the error kinds look swapped relative to what is
    // being compared; confirm against the project history before changing.
    if (forOverride) {
        if (superDescriptor.getTypeParameters().size() != subDescriptor.getTypeParameters().size()) {
            for (int i = 0; i < superValueParameters.size(); ++i) {
                JetType superValueParameterType = getUpperBound(superValueParameters.get(i));
                JetType subValueParameterType = getUpperBound(subValueParameters.get(i));
                // TODO: compare erasure
                if (!JetTypeChecker.INSTANCE.equalTypes(superValueParameterType, subValueParameterType)) {
                    return OverrideCompatibilityInfo.typeParameterNumberMismatch();
                }
            }
            return OverrideCompatibilityInfo.valueParameterTypeMismatch(null, null,
                    OverrideCompatibilityInfo.Result.CONFLICT);
        }
    }

    if (forOverride) {

        List<TypeParameterDescriptor> superTypeParameters = superDescriptor.getTypeParameters();
        List<TypeParameterDescriptor> subTypeParameters = subDescriptor.getTypeParameters();

        // Identify corresponding type parameters (by position) so that types
        // mentioning them can be compared as equal under these "axioms".
        BiMap<TypeConstructor, TypeConstructor> axioms = HashBiMap.create();
        for (int i = 0, typeParametersSize = superTypeParameters.size(); i < typeParametersSize; i++) {
            TypeParameterDescriptor superTypeParameter = superTypeParameters.get(i);
            TypeParameterDescriptor subTypeParameter = subTypeParameters.get(i);
            axioms.put(superTypeParameter.getTypeConstructor(), subTypeParameter.getTypeConstructor());
        }

        // Corresponding type parameters must have equal upper bounds.
        for (int i = 0, typeParametersSize = superTypeParameters.size(); i < typeParametersSize; i++) {
            TypeParameterDescriptor superTypeParameter = superTypeParameters.get(i);
            TypeParameterDescriptor subTypeParameter = subTypeParameters.get(i);

            if (!JetTypeChecker.INSTANCE.equalTypes(superTypeParameter.getUpperBoundsAsType(),
                    subTypeParameter.getUpperBoundsAsType(), axioms)) {
                return OverrideCompatibilityInfo.boundsMismatch(superTypeParameter, subTypeParameter);
            }
        }

        // Corresponding value parameters must have equal types (modulo axioms);
        // error types on both sides are tolerated to avoid cascading errors.
        for (int i = 0, unsubstitutedValueParametersSize = superValueParameters
                .size(); i < unsubstitutedValueParametersSize; i++) {
            JetType superValueParameter = superValueParameters.get(i);
            JetType subValueParameter = subValueParameters.get(i);

            boolean bothErrors = ErrorUtils.isErrorType(superValueParameter)
                    && ErrorUtils.isErrorType(subValueParameter);
            if (!bothErrors
                    && !JetTypeChecker.INSTANCE.equalTypes(superValueParameter, subValueParameter, axioms)) {
                return OverrideCompatibilityInfo.valueParameterTypeMismatch(superValueParameter,
                        subValueParameter, OverrideCompatibilityInfo.Result.INCOMPATIBLE);
            }
        }

    } else {

        // Overload check: only the upper bounds of value-parameter types matter.
        for (int i = 0; i < superValueParameters.size(); ++i) {
            JetType superValueParameterType = getUpperBound(superValueParameters.get(i));
            JetType subValueParameterType = getUpperBound(subValueParameters.get(i));
            // TODO: compare erasure
            if (!JetTypeChecker.INSTANCE.equalTypes(superValueParameterType, subValueParameterType)) {
                return OverrideCompatibilityInfo.valueParameterTypeMismatch(superValueParameterType,
                        subValueParameterType, OverrideCompatibilityInfo.Result.INCOMPATIBLE);
            }
        }

        return OverrideCompatibilityInfo.success();

    }

    // TODO : Default values, varargs etc

    return OverrideCompatibilityInfo.success();
}

From source file:org.eclipse.elk.alg.graphviz.dot.transform.DotExporter.java

/**
 * Transforms the KGraph instance to a Dot instance using the given command.
 *
 * @param transData the transformation data instance
 */
public void transform(final IDotTransformationData<KNode, GraphvizModel> transData) {
    // Map Dot element names to the KGraph elements they represent; stored on
    // the transformation data so later steps can resolve them.
    final BiMap<String, KGraphElement> elementMap = HashBiMap.create();
    transData.setProperty(GRAPH_ELEMS, elementMap);

    // Build an empty directed Dot graph inside a fresh Graphviz model.
    final GraphvizModel model = DotFactory.eINSTANCE.createGraphvizModel();
    final Graph dotGraph = DotFactory.eINSTANCE.createGraph();
    dotGraph.setType(GraphType.DIGRAPH);
    model.getGraphs().add(dotGraph);

    // Populate the Dot graph with the source graph's nodes and edges.
    final KNode sourceGraph = transData.getSourceGraph();
    transformNodes(sourceGraph, dotGraph.getStatements(), new KVector(), transData);
    transformEdges(sourceGraph, dotGraph.getStatements(), transData);

    transData.getTargetGraphs().add(model);
}

From source file:net.librec.data.convertor.TextDataConvertor.java

/**
 * Read data from the data file. Note that we didn't take care of the
 * duplicated lines.
 *
 * @param dataColumnFormat
 *            the format of input data file
 * @param inputDataPath
 *            the path of input data file
 * @param binThold
 *            the threshold to binarize a rating. If a rating is greater
 *            than the threshold, the value will be 1; otherwise 0. To
 *            disable this appender, i.e., keep the original rating value,
 *            set the threshold a negative value
 * @throws IOException
 *            if the <code>inputDataPath</code> is not valid.
 */
private void readData(String dataColumnFormat, String inputDataPath, double binThold) throws IOException {
    LOG.info(String.format("Dataset: %s", StringUtil.last(inputDataPath, 38)));
    // Table {row-id, col-id, rate}
    Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
    // Table {row-id, col-id, timestamp}; created lazily on the first timestamp
    Table<Integer, Integer, Long> timeTable = null;
    // Map {col-id, multiple row-id}: used to fast build a rating matrix
    Multimap<Integer, Integer> colMap = HashMultimap.create();
    // BiMap {raw id, inner id} userIds, itemIds; reused across calls so that
    // inner ids stay stable when reading several splits
    if (this.userIds == null) {
        this.userIds = HashBiMap.create();
    }
    if (this.itemIds == null) {
        this.itemIds = HashBiMap.create();
    }
    // Collect every file under the input path together with its size,
    // so that load progress can be reported.
    final List<File> files = new ArrayList<File>();
    final ArrayList<Long> fileSizeList = new ArrayList<Long>();
    SimpleFileVisitor<Path> finder = new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            fileSizeList.add(file.toFile().length());
            files.add(file.toFile());
            return super.visitFile(file, attrs);
        }
    };
    Files.walkFileTree(Paths.get(inputDataPath), finder);
    LOG.info("All dataset files " + files.toString());
    long allFileSize = 0;
    for (Long everyFileSize : fileSizeList) {
        allFileSize = allFileSize + everyFileSize.longValue();
    }
    LOG.info("All dataset files size " + Long.toString(allFileSize));
    int readingFileCount = 0;
    long loadAllFileByte = 0;
    // loop every dataFile collecting from walkFileTree
    for (File dataFile : files) {
        LOG.info("Now loading dataset file " + dataFile.toString().substring(
                dataFile.toString().lastIndexOf(File.separator) + 1, dataFile.toString().lastIndexOf(".")));
        readingFileCount += 1;
        loadFilePathRate = readingFileCount / (float) files.size();
        long readingOneFileByte = 0;
        // Bug fix: try-with-resources guarantees the stream/channel are closed
        // even when parsing throws (previously they leaked on any exception).
        try (FileInputStream fis = new FileInputStream(dataFile);
                FileChannel fileRead = fis.getChannel()) {
            ByteBuffer buffer = ByteBuffer.allocate(BSIZE);
            int len;
            // Carries an incomplete trailing line over between buffer reads.
            String bufferLine = "";
            byte[] bytes = new byte[BSIZE];
            while ((len = fileRead.read(buffer)) != -1) {
                readingOneFileByte += len;
                loadDataFileRate = readingOneFileByte / (float) fileRead.size();
                loadAllFileByte += len;
                loadAllFileRate = loadAllFileByte / (float) allFileSize;
                buffer.flip();
                buffer.get(bytes, 0, len);
                // NOTE(review): decodes with the platform default charset; a
                // multi-byte character split across buffer boundaries would be
                // corrupted — acceptable for ASCII rating files, confirm otherwise.
                bufferLine = bufferLine.concat(new String(bytes, 0, len));
                bufferLine = bufferLine.replaceAll("\r", "\n");
                String[] bufferData = bufferLine.split("(\n)+");
                boolean isComplete = bufferLine.endsWith("\n");
                // The last fragment is kept for the next round unless the
                // buffer happened to end exactly on a line break.
                int loopLength = isComplete ? bufferData.length : bufferData.length - 1;
                for (int i = 0; i < loopLength; i++) {
                    String line = bufferData[i];
                    String[] data = line.trim().split("[ \t,]+");
                    String user = data[0];
                    String item = data[1];
                    // Default the rating to 1.0 for implicit-feedback formats.
                    Double rate = ((dataColumnFormat.equals("UIR") || dataColumnFormat.equals("UIRT"))
                            && data.length >= 3) ? Double.valueOf(data[2]) : 1.0;

                    // binarize the rating for item recommendation task
                    if (binThold >= 0) {
                        rate = rate > binThold ? 1.0 : 0.0;
                    }

                    // inner id starting from 0
                    int row = userIds.containsKey(user) ? userIds.get(user) : userIds.size();
                    userIds.put(user, row);

                    int col = itemIds.containsKey(item) ? itemIds.get(item) : itemIds.size();
                    itemIds.put(item, col);

                    dataTable.put(row, col, rate);
                    colMap.put(col, row);
                    // record rating's issuing time
                    if (StringUtils.equals(dataColumnFormat, "UIRT") && data.length >= 4) {
                        if (timeTable == null) {
                            timeTable = HashBasedTable.create();
                        }
                        // convert to million-seconds
                        long mms = 0L;
                        try {
                            mms = Long.parseLong(data[3]); // cannot format
                            // 9.7323480e+008
                        } catch (NumberFormatException e) {
                            mms = (long) Double.parseDouble(data[3]);
                        }
                        long timestamp = timeUnit.toMillis(mms);
                        timeTable.put(row, col, timestamp);
                    }
                }
                if (!isComplete) {
                    bufferLine = bufferData[bufferData.length - 1];
                }
                buffer.clear();
            }
        }
    }
    int numRows = numUsers(), numCols = numItems();
    // build rating matrix
    preferenceMatrix = new SparseMatrix(numRows, numCols, dataTable, colMap);
    if (timeTable != null)
        datetimeMatrix = new SparseMatrix(numRows, numCols, timeTable, colMap);
    // release memory of data table
    dataTable = null;
    timeTable = null;
}

From source file:de.iteratec.iteraplan.elasticeam.model.compare.impl.DiffBuilderImpl.java

private void processMatches(DiffBuilderResultImpl result) {
    // Index every matched pair: left model expression -> right model expression.
    BiMap<UniversalModelExpression, UniversalModelExpression> leftToRight = HashBiMap.create();
    for (Entry<UniversalTypeExpression, Set<MatchingPair>> entry : matchResult.getMatches().entrySet()) {
        for (MatchingPair pair : entry.getValue()) {
            leftToRight.put(pair.getLeftExpression(), pair.getRightExpression());
        }
    }

    // Diff each matched pair property-by-property and relationship-by-relationship,
    // recording only those diffs that actually contain differences.
    for (Entry<UniversalTypeExpression, Set<MatchingPair>> entry : matchResult.getMatches().entrySet()) {
        UniversalTypeExpression type = entry.getKey();
        for (MatchingPair pair : entry.getValue()) {
            UniversalModelExpression left = pair.getLeftExpression();
            UniversalModelExpression right = pair.getRightExpression();
            TwoSidedDiffImpl diff = new TwoSidedDiffImpl(type, left, right);
            for (PropertyExpression<?> property : type.getProperties()) {
                diffProperty(property, left, right, diff);
            }
            for (RelationshipEndExpression relEnd : type.getRelationshipEnds()) {
                diffRelationshipEnd(relEnd, pair, leftToRight, diff, result);
            }
            if (!diff.getDiffParts().isEmpty()) {
                result.addDiff(diff);
            }
        }
    }
}

From source file:io.prestosql.sql.planner.NodePartitioningManager.java

/**
 * Resolves the node partitioning for the given handle: maps every bucket to a
 * node, assigns each distinct node a dense partition id, and returns the
 * resulting bucket/partition/node mapping.
 */
public NodePartitionMap getNodePartitioningMap(Session session, PartitioningHandle partitioningHandle) {
    requireNonNull(session, "session is null");
    requireNonNull(partitioningHandle, "partitioningHandle is null");

    // System partitioning is resolved entirely by the handle itself.
    if (partitioningHandle.getConnectorHandle() instanceof SystemPartitioningHandle) {
        return ((SystemPartitioningHandle) partitioningHandle.getConnectorHandle()).getNodePartitionMap(session,
                nodeScheduler);
    }

    ConnectorId connectorId = partitioningHandle.getConnectorId()
            .orElseThrow(() -> new IllegalArgumentException(
                    "No connector ID for partitioning handle: " + partitioningHandle));
    ConnectorNodePartitioningProvider partitioningProvider = partitioningProviders.get(connectorId);
    checkArgument(partitioningProvider != null, "No partitioning provider for connector %s", connectorId);

    ConnectorBucketNodeMap connectorBucketNodeMap = getConnectorBucketNodeMap(session, partitioningHandle);
    // safety check for crazy partitioning
    checkArgument(connectorBucketNodeMap.getBucketCount() < 1_000_000, "Too many buckets in partitioning: %s",
            connectorBucketNodeMap.getBucketCount());

    // Either the connector fixes the bucket -> node assignment, or we pick one.
    final List<Node> bucketToNode;
    if (connectorBucketNodeMap.hasFixedMapping()) {
        bucketToNode = connectorBucketNodeMap.getFixedMapping();
    } else {
        bucketToNode = createArbitraryBucketToNode(nodeScheduler.createNodeSelector(connectorId).allNodes(),
                connectorBucketNodeMap.getBucketCount());
    }

    // Assign each distinct node a dense partition id in first-seen order and
    // translate bucket -> node into bucket -> partition.
    BiMap<Node, Integer> nodeToPartition = HashBiMap.create();
    int[] bucketToPartition = new int[connectorBucketNodeMap.getBucketCount()];
    for (int bucket = 0; bucket < bucketToNode.size(); bucket++) {
        Node node = bucketToNode.get(bucket);
        if (!nodeToPartition.containsKey(node)) {
            // Ids are dense, so the next id equals the current map size.
            nodeToPartition.put(node, nodeToPartition.size());
        }
        bucketToPartition[bucket] = nodeToPartition.get(node);
    }

    // Invert the node map into a list ordered by partition id.
    List<Node> partitionToNode = IntStream.range(0, nodeToPartition.size())
            .mapToObj(partitionId -> nodeToPartition.inverse().get(partitionId)).collect(toImmutableList());

    return new NodePartitionMap(partitionToNode, bucketToPartition,
            getSplitToBucket(session, partitioningHandle));
}