/** Restores the {@link Tuple0} singleton, consuming the single placeholder byte written by serialization. */
@Override
public Tuple0 deserialize(DataInputView source) throws IOException {
    // Serialization always writes exactly one filler byte for Tuple0; discard it.
    source.readByte();
    return Tuple0.INSTANCE;
}
/**
 * Reads a {@link FlinkKafkaProducer.KafkaTransactionState} from the view.
 * Field order mirrors the serialized layout: nullable transactionalId, producerId, epoch.
 */
@Override
public FlinkKafkaProducer.KafkaTransactionState deserialize(DataInputView source) throws IOException {
    // A boolean presence flag precedes the optional transactional id.
    final String transactionalId = source.readBoolean() ? source.readUTF() : null;
    final long producerId = source.readLong();
    final short epoch = source.readShort();
    // The live producer handle is not part of the serialized form, hence null.
    return new FlinkKafkaProducer.KafkaTransactionState(transactionalId, producerId, epoch, null);
}
/**
 * Consumes the null marker and, for null values, the fixed-size padding that follows it.
 *
 * @param source the view positioned at a null marker
 * @return {@code true} if the serialized value was null
 */
private boolean deserializeNull(DataInputView source) throws IOException {
    final boolean wasNull = source.readBoolean();
    if (!wasNull) {
        return false;
    }
    // Null entries are padded to a fixed width; skip the filler bytes.
    source.skipBytesToRead(padding.length);
    return true;
}
/**
 * Reads a {@link FlinkKafkaProducer.KafkaTransactionContext}: an int count followed
 * by that many UTF-encoded transactional ids.
 */
@Override
public FlinkKafkaProducer.KafkaTransactionContext deserialize(DataInputView source) throws IOException {
    final int numIds = source.readInt();
    final Set<String> transactionalIds = new HashSet<>(numIds);
    for (int remaining = numIds; remaining > 0; remaining--) {
        transactionalIds.add(source.readUTF());
    }
    return new FlinkKafkaProducer.KafkaTransactionContext(transactionalIds);
}
/**
 * Skips over an optionally-present, Java-serialized condition in the legacy format:
 * a boolean presence flag, then an int length, then that many bytes holding a
 * Java-serialized object which is read and discarded.
 *
 * @param in the view positioned at the condition's presence flag
 * @throws IOException if reading from the view fails
 * @throws ClassNotFoundException if the serialized object's class cannot be resolved
 */
private static void skipCondition(DataInputView in) throws IOException, ClassNotFoundException {
    boolean hasCondition = in.readBoolean();
    if (hasCondition) {
        int length = in.readInt();
        byte[] serCondition = new byte[length];
        // Fix: read() may return after a partial read, leaving the stream position
        // mid-payload and corrupting all subsequent reads; readFully consumes exactly
        // 'length' bytes or throws EOFException.
        in.readFully(serCondition);
        // NOTE(review): Java-native deserialization is unsafe on untrusted input;
        // kept here only because the payload is merely discarded for format migration.
        // try-with-resources ensures the streams are closed even if readObject throws.
        try (ObjectInputStream ois =
                new ObjectInputStream(new ByteArrayInputStream(serCondition))) {
            ois.readObject();
        }
    }
}
/**
 * Reads a map of {@code size} entries. Each entry is a key followed by a boolean
 * null marker; only non-null values carry a serialized payload.
 */
@Override
public Map<K, V> deserialize(DataInputView source) throws IOException {
    final int numEntries = source.readInt();
    final Map<K, V> result = new HashMap<>(numEntries);
    for (int entry = 0; entry < numEntries; entry++) {
        final K key = keySerializer.deserialize(source);
        // The marker byte says whether a value payload follows.
        final V value = source.readBoolean() ? null : valueSerializer.deserialize(source);
        result.put(key, value);
    }
    return result;
}
/** Restores this value from its modified-UTF-8 serialized form. */
@Override
public void read(DataInputView in) throws IOException {
    final String restored = in.readUTF();
    this.value = restored;
}
/**
 * Copies {@code numBytes} bytes from {@code source} into this buffer at the current
 * write position, advancing the position.
 *
 * @throws IOException if fewer than {@code numBytes} bytes of capacity remain
 */
@Override
public void write(DataInputView source, int numBytes) throws IOException {
    final int remainingCapacity = this.end - this.position;
    if (numBytes > remainingCapacity) {
        throw new IOException("Could not write " + numBytes + " bytes since the buffer is full.");
    }
    source.readFully(this.memory, this.position, numBytes);
    this.position += numBytes;
}
}
/**
 * Reads a legacy-format NFA, keeping only the shared buffer and computation states.
 * The read order below must match the legacy serialized layout exactly.
 */
@Override
public MigratedNFA<T> deserialize(DataInputView source) throws IOException {
    // Serialized states from the old format are not needed; skip past them.
    MigrationUtils.skipSerializedStates(source);
    // Two legacy header fields (a long and a boolean) are read and discarded.
    source.readLong();
    source.readBoolean();
    final org.apache.flink.cep.nfa.SharedBuffer<T> buffer =
        sharedBufferSerializer.deserialize(source);
    final Queue<ComputationState> states =
        deserializeComputationStates(buffer, eventSerializer, source);
    return new MigratedNFA<>(states, buffer);
}
/**
 * Compares two serialized booleans (false sorts before true), honoring the
 * configured sort direction.
 */
@Override
public int compareSerialized(DataInputView firstSource, DataInputView secondSource) throws IOException {
    final boolean first = firstSource.readBoolean();
    final boolean second = secondSource.readBoolean();
    final int result = Boolean.compare(first, second);
    // Negate for descending order.
    return ascendingComparison ? result : -result;
}
/** Restores this value from its two-byte serialized form. */
@Override
public void read(DataInputView in) throws IOException {
    final short restored = in.readShort();
    this.value = restored;
}
/**
 * Delegates the bulk read to the backing input view.
 *
 * @return the number of bytes read, or -1 at end of input (per the delegate's contract)
 */
@Override
public int read(byte[] b, int off, int len) throws IOException {
    return inputView.read(b, off, len);
}
}