/**
 * Deserializes the contents of {@code byteBuffer} into an object using a pooled Kryo instance.
 *
 * @param byteBuffer buffer positioned at the start of a Kryo-serialized payload
 * @param classTag unused; required by the Spark {@code SerializerInstance} contract
 * @return the deserialized object
 */
@Override
public <T> T deserialize(final ByteBuffer byteBuffer, final ClassTag<T> classTag) {
    // Honor the buffer's arrayOffset/position/remaining rather than assuming the backing
    // array is exactly the payload: array() alone mis-reads sliced or offset buffers.
    // NOTE(review): still assumes a heap (array-backed) buffer — array() throws for
    // direct buffers; confirm Spark never hands this a direct ByteBuffer.
    this.input.setBuffer(byteBuffer.array(),
            byteBuffer.arrayOffset() + byteBuffer.position(),
            byteBuffer.remaining());
    return this.gryoSerializer.getGryoPool().readWithKryo(kryo -> (T) kryo.readClassAndObject(this.input));
}
/**
 * Writes {@code t} (class header plus object data) onto this stream's output via a pooled
 * Kryo instance.
 *
 * @param t the object to serialize
 * @param classTag unused; required by the Spark {@code SerializationStream} contract
 * @return this stream, to allow call chaining
 */
@Override
public <T> SerializationStream writeObject(final T t, final ClassTag<T> classTag) {
    final GryoPool pool = this.gryoSerializer.getGryoPool();
    pool.writeWithKryo(kryo -> kryo.writeClassAndObject(this.output, t));
    return this;
}
/**
 * Create the {@code GryoPool} from this builder.
 *
 * @return the new pool
 */
public GryoPool create() {
    final GryoMapper.Builder mapperBuilder = GryoMapper.build().version(version);
    final GryoPool pool = new GryoPool();
    // optional registries/customizer are applied only when configured on the builder
    if (null != this.ioRegistries)
        this.ioRegistries.forEach(mapperBuilder::addRegistry);
    if (null != this.gryoMapperConsumer)
        this.gryoMapperConsumer.accept(mapperBuilder);
    pool.createPool(this.poolSize, this.type, mapperBuilder.create());
    return pool;
}
}
/**
 * Borrows a writer from the pool, hands it to {@code writerFunction}, then returns it.
 * NOTE(review): if the consumer throws, the writer is not returned to the pool — this
 * matches the original behavior.
 *
 * @param writerFunction the work to perform with the borrowed writer
 */
public void doWithWriter(final Consumer<GryoWriter> writerFunction) {
    final GryoWriter writer = takeWriter();
    writerFunction.accept(writer);
    offerWriter(writer);
}
/**
 * Borrows a reader from the pool, applies {@code readerFunction}, returns the reader to the
 * pool, and yields the function's result.
 * NOTE(review): if the function throws, the reader is not returned to the pool — this
 * matches the original behavior.
 *
 * @param readerFunction the work to perform with the borrowed reader
 * @param <A> the result type produced by the function
 * @return the value produced by {@code readerFunction}
 */
public <A> A doWithReader(final Function<GryoReader, A> readerFunction) {
    final GryoReader reader = takeReader();
    final A result = readerFunction.apply(reader);
    offerReader(reader);
    return result;
}
/**
 * Lazily builds the shared {@link GryoPool} from the supplied configuration; subsequent
 * calls are no-ops. Thread-safe via method-level synchronization.
 *
 * @param configuration source of pool size, gryo version, and io-registry settings
 */
public synchronized static void initialize(final Configuration configuration) {
    if (!INITIALIZED) {
        GRYO_POOL = GryoPool.build().
                // use the shared default constant instead of a hard-coded 256 so this
                // stays consistent with GryoSerializer's pool construction
                poolSize(configuration.getInt(GryoPool.CONFIG_IO_GRYO_POOL_SIZE, GryoPool.CONFIG_IO_GRYO_POOL_SIZE_DEFAULT)).
                version(GryoVersion.valueOf(configuration.getString(GryoPool.CONFIG_IO_GRYO_VERSION, GryoPool.CONFIG_IO_GRYO_POOL_VERSION_DEFAULT.name()))).
                ioRegistries(configuration.getList(IoRegistry.IO_REGISTRY, Collections.emptyList())).
                initializeMapper(m -> m.registrationRequired(false)).
                create();
        // flag success only after the pool is built so a failed build can be retried
        // instead of leaving INITIALIZED=true with a null pool
        INITIALIZED = true;
    }
}
public GryoSerializer(final SparkConf sparkConfiguration) { final long bufferSizeKb = sparkConfiguration.getSizeAsKb("spark.kryoserializer.buffer", "64k"); final long maxBufferSizeMb = sparkConfiguration.getSizeAsMb("spark.kryoserializer.buffer.max", "64m"); this.referenceTracking = sparkConfiguration.getBoolean("spark.kryo.referenceTracking", true); this.registrationRequired = sparkConfiguration.getBoolean(Constants.SPARK_KRYO_REGISTRATION_REQUIRED, false); if (bufferSizeKb >= ByteUnit.GiB.toKiB(2L)) { throw new IllegalArgumentException("spark.kryoserializer.buffer must be less than 2048 mb, got: " + bufferSizeKb + " mb."); } else { this.bufferSize = (int) ByteUnit.KiB.toBytes(bufferSizeKb); if (maxBufferSizeMb >= ByteUnit.GiB.toMiB(2L)) { throw new IllegalArgumentException("spark.kryoserializer.buffer.max must be less than 2048 mb, got: " + maxBufferSizeMb + " mb."); } else { this.maxBufferSize = (int) ByteUnit.MiB.toBytes(maxBufferSizeMb); //this.userRegistrator = sparkConfiguration.getOption("spark.kryo.registrator"); } } // create a GryoPool and store it in static HadoopPools final List<Object> ioRegistries = new ArrayList<>(); ioRegistries.addAll(makeApacheConfiguration(sparkConfiguration).getList(IoRegistry.IO_REGISTRY, Collections.emptyList())); ioRegistries.add(SparkIoRegistry.class.getCanonicalName().replace("." + SparkIoRegistry.class.getSimpleName(), "$" + SparkIoRegistry.class.getSimpleName())); HadoopPools.initialize(GryoPool.build(). version(GryoVersion.valueOf(sparkConfiguration.get(GryoPool.CONFIG_IO_GRYO_VERSION, GryoPool.CONFIG_IO_GRYO_POOL_VERSION_DEFAULT.name()))). poolSize(sparkConfiguration.getInt(GryoPool.CONFIG_IO_GRYO_POOL_SIZE, GryoPool.CONFIG_IO_GRYO_POOL_SIZE_DEFAULT)). ioRegistries(ioRegistries). initializeMapper(builder -> builder.referenceTracking(this.referenceTracking). registrationRequired(this.registrationRequired)). create()); }
/**
 * Runs {@code writerFunction} against a writer taken from the pool and then puts the
 * writer back. NOTE(review): a throwing consumer skips the offer — matches original behavior.
 *
 * @param writerFunction the work to perform with the borrowed writer
 */
public void doWithWriter(final Consumer<GryoWriter> writerFunction) {
    final GryoWriter borrowed = takeWriter();
    writerFunction.accept(borrowed);
    offerWriter(borrowed);
}
/**
 * Runs {@code readerFunction} against a reader taken from the pool, puts the reader back,
 * and returns the function's result. NOTE(review): a throwing function skips the offer —
 * matches original behavior.
 *
 * @param readerFunction the work to perform with the borrowed reader
 * @param <A> the result type produced by the function
 * @return the value produced by {@code readerFunction}
 */
public <A> A doWithReader(final Function<GryoReader, A> readerFunction) {
    final GryoReader borrowed = takeReader();
    final A value = readerFunction.apply(borrowed);
    offerReader(borrowed);
    return value;
}
/**
 * Deserializes the contents of {@code byteBuffer} using a pooled Kryo instance, resolving
 * classes against the supplied class loader.
 *
 * @param byteBuffer buffer positioned at the start of a Kryo-serialized payload
 * @param classLoader loader used by Kryo to resolve the serialized class
 * @param classTag unused; required by the Spark {@code SerializerInstance} contract
 * @return the deserialized object
 */
@Override
public <T> T deserialize(final ByteBuffer byteBuffer, final ClassLoader classLoader, final ClassTag<T> classTag) {
    // Honor the buffer's arrayOffset/position/remaining rather than assuming the backing
    // array is exactly the payload: array() alone mis-reads sliced or offset buffers.
    // NOTE(review): still assumes a heap (array-backed) buffer — array() throws for
    // direct buffers; confirm Spark never hands this a direct ByteBuffer.
    this.input.setBuffer(byteBuffer.array(),
            byteBuffer.arrayOffset() + byteBuffer.position(),
            byteBuffer.remaining());
    return this.gryoSerializer.getGryoPool().readWithKryo(kryo -> {
        kryo.setClassLoader(classLoader);
        return (T) kryo.readClassAndObject(this.input);
    });
}
/**
 * Serializes {@code t} (class header plus object data) into a {@link ByteBuffer} using a
 * pooled Kryo instance.
 *
 * @param t the object to serialize
 * @param classTag unused; required by the Spark {@code SerializerInstance} contract
 * @return a buffer whose remaining bytes are exactly the serialized payload
 */
@Override
public <T> ByteBuffer serialize(final T t, final ClassTag<T> classTag) {
    // reset position so bytes from earlier serialize() calls don't leak into this payload
    this.output.clear();
    this.gryoSerializer.getGryoPool().writeWithKryo(kryo -> kryo.writeClassAndObject(this.output, t));
    // toBytes() copies only the written portion; wrapping getBuffer() would expose the
    // whole (typically larger) backing array, giving the caller trailing garbage and a
    // wrong remaining() length
    return ByteBuffer.wrap(this.output.toBytes());
}
/**
 * Lazily builds the shared {@link GryoPool} from the supplied configuration; subsequent
 * calls are no-ops. Thread-safe via method-level synchronization.
 *
 * @param configuration source of pool size, gryo version, and io-registry settings
 */
public synchronized static void initialize(final Configuration configuration) {
    if (!INITIALIZED) {
        GRYO_POOL = GryoPool.build().
                // use the shared default constant instead of a hard-coded 256 so this
                // stays consistent with GryoSerializer's pool construction
                poolSize(configuration.getInt(GryoPool.CONFIG_IO_GRYO_POOL_SIZE, GryoPool.CONFIG_IO_GRYO_POOL_SIZE_DEFAULT)).
                version(GryoVersion.valueOf(configuration.getString(GryoPool.CONFIG_IO_GRYO_VERSION, GryoPool.CONFIG_IO_GRYO_POOL_VERSION_DEFAULT.name()))).
                ioRegistries(configuration.getList(IoRegistry.IO_REGISTRY, Collections.emptyList())).
                initializeMapper(m -> m.registrationRequired(false)).
                create();
        // flag success only after the pool is built so a failed build can be retried
        // instead of leaving INITIALIZED=true with a null pool
        INITIALIZED = true;
    }
}
/**
 * Create the {@code GryoPool} from this builder.
 *
 * @return the new pool
 */
public GryoPool create() {
    final GryoMapper.Builder builder = GryoMapper.build().version(version);
    final GryoPool result = new GryoPool();
    // apply any optional registries and the optional mapper customizer before pooling
    if (null != this.ioRegistries)
        this.ioRegistries.forEach(builder::addRegistry);
    if (null != this.gryoMapperConsumer)
        this.gryoMapperConsumer.accept(builder);
    result.createPool(this.poolSize, this.type, builder.create());
    return result;
}
}
/**
 * Reads one object (class header plus data) from the stream using a pooled Kryo instance.
 * The wrapping {@code Input} is intentionally not closed so the caller's stream stays open.
 *
 * @param inputStream stream positioned at a Kryo-serialized object
 * @return the deserialized object
 */
@Override
public Object readClassAndObject(final InputStream inputStream) {
    final Input input = new Input(inputStream);
    return HadoopPools.getGryoPool().readWithKryo(kryo -> kryo.readClassAndObject(input));
}
/**
 * Writes one object (class header plus data) to the stream using a pooled Kryo instance.
 * The wrapping {@code Output} is flushed but intentionally not closed so the caller's
 * stream stays open.
 *
 * @param object the object to serialize
 * @param outputStream destination for the serialized bytes
 */
@Override
public void writeClassAndObject(final Object object, final OutputStream outputStream) {
    HadoopPools.getGryoPool().writeWithKryo(kryo -> {
        final Output out = new Output(outputStream);
        kryo.writeClassAndObject(out, object);
        out.flush();
    });
}
public GryoSerializer(final SparkConf sparkConfiguration) { final long bufferSizeKb = sparkConfiguration.getSizeAsKb("spark.kryoserializer.buffer", "64k"); final long maxBufferSizeMb = sparkConfiguration.getSizeAsMb("spark.kryoserializer.buffer.max", "64m"); this.referenceTracking = sparkConfiguration.getBoolean("spark.kryo.referenceTracking", true); this.registrationRequired = sparkConfiguration.getBoolean(Constants.SPARK_KRYO_REGISTRATION_REQUIRED, false); if (bufferSizeKb >= ByteUnit.GiB.toKiB(2L)) { throw new IllegalArgumentException("spark.kryoserializer.buffer must be less than 2048 mb, got: " + bufferSizeKb + " mb."); } else { this.bufferSize = (int) ByteUnit.KiB.toBytes(bufferSizeKb); if (maxBufferSizeMb >= ByteUnit.GiB.toMiB(2L)) { throw new IllegalArgumentException("spark.kryoserializer.buffer.max must be less than 2048 mb, got: " + maxBufferSizeMb + " mb."); } else { this.maxBufferSize = (int) ByteUnit.MiB.toBytes(maxBufferSizeMb); //this.userRegistrator = sparkConfiguration.getOption("spark.kryo.registrator"); } } // create a GryoPool and store it in static HadoopPools final List<Object> ioRegistries = new ArrayList<>(); ioRegistries.addAll(makeApacheConfiguration(sparkConfiguration).getList(IoRegistry.IO_REGISTRY, Collections.emptyList())); ioRegistries.add(SparkIoRegistry.class.getCanonicalName().replace("." + SparkIoRegistry.class.getSimpleName(), "$" + SparkIoRegistry.class.getSimpleName())); HadoopPools.initialize(GryoPool.build(). version(GryoVersion.valueOf(sparkConfiguration.get(GryoPool.CONFIG_IO_GRYO_VERSION, GryoPool.CONFIG_IO_GRYO_POOL_VERSION_DEFAULT.name()))). poolSize(sparkConfiguration.getInt(GryoPool.CONFIG_IO_GRYO_POOL_SIZE, GryoPool.CONFIG_IO_GRYO_POOL_SIZE_DEFAULT)). ioRegistries(ioRegistries). initializeMapper(builder -> builder.referenceTracking(this.referenceTracking). registrationRequired(this.registrationRequired)). create()); }