Example usage for org.apache.spark.network.util ByteUnit KiB

List of usage examples for org.apache.spark.network.util ByteUnit KiB

Introduction

In this page you can find the example usage for org.apache.spark.network.util ByteUnit KiB.

Prototype

ByteUnit KiB

To view the source code for org.apache.spark.network.util ByteUnit KiB, click the Source Link below.

Click Source Link

Usage

From source file: org.apache.tinkerpop.gremlin.spark.structure.io.gryo.GryoSerializer.java

License:Apache License

/**
 * Builds a Gryo-backed Kryo serializer configured from the supplied Spark configuration.
 *
 * <p>Reads the Kryo buffer sizes ({@code spark.kryoserializer.buffer} in KiB,
 * {@code spark.kryoserializer.buffer.max} in MiB), reference-tracking and
 * registration-required flags, then initializes the static {@code HadoopPools}
 * with a {@code GryoPool} carrying the configured IO registries.
 *
 * @param sparkConfiguration the Spark configuration to read serializer settings from
 * @throws IllegalArgumentException if either buffer size is 2 GiB or larger —
 *         Kryo tracks buffer sizes as an {@code int} number of bytes, so values
 *         at or above 2 GiB would overflow the cast below
 */
public GryoSerializer(final SparkConf sparkConfiguration) {
    // getSizeAsKb/getSizeAsMb return the value in KiB and MiB respectively.
    final long bufferSizeKb = sparkConfiguration.getSizeAsKb("spark.kryoserializer.buffer", "64k");
    final long maxBufferSizeMb = sparkConfiguration.getSizeAsMb("spark.kryoserializer.buffer.max", "64m");
    this.referenceTracking = sparkConfiguration.getBoolean("spark.kryo.referenceTracking", true);
    this.registrationRequired = sparkConfiguration.getBoolean(Constants.SPARK_KRYO_REGISTRATION_REQUIRED,
            false);
    // Guard clauses: both buffers must fit in an int byte count (< 2 GiB).
    // Note the reported value is in KiB here, matching getSizeAsKb above.
    if (bufferSizeKb >= ByteUnit.GiB.toKiB(2L)) {
        throw new IllegalArgumentException(
                "spark.kryoserializer.buffer must be less than 2048 mb, got: " + bufferSizeKb + " kb.");
    }
    this.bufferSize = (int) ByteUnit.KiB.toBytes(bufferSizeKb);
    if (maxBufferSizeMb >= ByteUnit.GiB.toMiB(2L)) {
        throw new IllegalArgumentException(
                "spark.kryoserializer.buffer.max must be less than 2048 mb, got: " + maxBufferSizeMb
                        + " mb.");
    }
    this.maxBufferSize = (int) ByteUnit.MiB.toBytes(maxBufferSizeMb);
    // Create a GryoPool and store it in the static HadoopPools so mappers can share it.
    final List<Object> ioRegistries = new ArrayList<>();
    ioRegistries.addAll(makeApacheConfiguration(sparkConfiguration).getList(IoRegistry.IO_REGISTRY,
            Collections.emptyList()));
    // SparkIoRegistry is a nested class: swap the last '.' for '$' to form its binary name,
    // which is what Class.forName-style loading inside GryoPool expects.
    ioRegistries.add(SparkIoRegistry.class.getCanonicalName()
            .replace("." + SparkIoRegistry.class.getSimpleName(), "$" + SparkIoRegistry.class.getSimpleName()));
    HadoopPools.initialize(GryoPool.build()
            .poolSize(sparkConfiguration.getInt(GryoPool.CONFIG_IO_GRYO_POOL_SIZE,
                    GryoPool.CONFIG_IO_GRYO_POOL_SIZE_DEFAULT))
            .ioRegistries(ioRegistries).initializeMapper(builder -> builder
                    .referenceTracking(this.referenceTracking).registrationRequired(this.registrationRequired))
            .create());
}