/**
 * Serializer side of the Bytes pass-through: exposes the wrapped byte array.
 * The topic name is accepted for interface compatibility but not consulted.
 *
 * @param topic topic associated with the record (unused)
 * @param data  wrapped bytes, possibly null
 * @return the backing byte array, or null when {@code data} is null
 */
public byte[] serialize(String topic, Bytes data) {
    // Null in, null out — preserves null payloads unchanged.
    return data == null ? null : data.get();
}
/**
 * Deserializer side of the Bytes pass-through: wraps the raw payload.
 * The topic name is accepted for interface compatibility but not consulted.
 *
 * @param topic topic associated with the record (unused)
 * @param data  raw byte payload, possibly null
 * @return a {@code Bytes} wrapper around the payload, or null when {@code data} is null
 */
public Bytes deserialize(String topic, byte[] data) {
    // Null in, null out — preserves null payloads unchanged.
    return data == null ? null : new Bytes(data);
}
/**
 * Renders the entire backing array as a string via the {@code Bytes} helper.
 */
@Override
public String toString() {
    return Bytes.toString(this.bytes, 0, this.bytes.length);
}
@Override public boolean equals(Object other) { if (this == other) return true; if (other == null) return false; // we intentionally use the function to compute hashcode here if (this.hashCode() != other.hashCode()) return false; if (other instanceof Bytes) return Arrays.equals(this.bytes, ((Bytes) other).get()); return false; }
static Bytes lowerRange(final Bytes key, final byte[] minSuffix) { final byte[] bytes = key.get(); final ByteBuffer rangeStart = ByteBuffer.allocate(bytes.length + minSuffix.length); // any key in the range would start at least with the given prefix to be // in the range, and have at least SUFFIX_SIZE number of trailing zero bytes. // unless there is a maximum key length, you can keep appending more zero bytes // to keyFrom to create a key that will match the range, yet that would precede // KeySchema.toBinaryKey(keyFrom, from, 0) in byte order return Bytes.wrap( rangeStart .put(bytes) .put(minSuffix) .array() ); } }
/**
 * Serializes the message payload to JSON bytes and wraps them in {@code Bytes}.
 *
 * @param message the message whose payload is converted
 * @return the JSON-encoded payload as {@code Bytes}
 * @throws ConversionException if JSON serialization fails (original cause preserved)
 */
@Override
protected Object convertPayload(Message<?> message) {
    try {
        final byte[] jsonBytes = getObjectMapper().writeValueAsBytes(message.getPayload());
        return Bytes.wrap(jsonBytes);
    } catch (JsonProcessingException e) {
        // Wrap with the cause attached so the failure context is not lost.
        throw new ConversionException("Failed to convert to JSON", e);
    }
}
/**
 * Advances past store entries whose key or timestamp falls outside the
 * configured range, reporting whether a matching entry remains.
 *
 * @param iterator the underlying store iterator to filter
 * @return true when the next peeked entry is within both the key range
 *         [binaryKeyFrom, binaryKeyTo] (null bound = unbounded) and the
 *         time range [from, to]
 */
@Override
public boolean hasNext(final KeyValueIterator<Bytes, ?> iterator) {
    while (iterator.hasNext()) {
        final Bytes rawKey = iterator.peekNextKey();
        final Bytes storeKey = Bytes.wrap(WindowKeySchema.extractStoreKeyBytes(rawKey.get()));
        final long timestamp = WindowKeySchema.extractStoreTimestamp(rawKey.get());
        final boolean aboveLower = binaryKeyFrom == null || storeKey.compareTo(binaryKeyFrom) >= 0;
        final boolean belowUpper = binaryKeyTo == null || storeKey.compareTo(binaryKeyTo) <= 0;
        if (aboveLower && belowUpper && timestamp >= from && timestamp <= to) {
            return true;
        }
        // Out of range — consume it and keep scanning.
        iterator.next();
    }
    return false;
}
};
/**
 * Compares a cache key against a store key using the natural
 * lexicographic ordering of {@code Bytes}.
 */
@Override
public int compare(final Bytes cacheKey, final Bytes storeKey) {
    final int ordering = cacheKey.compareTo(storeKey);
    return ordering;
}
}
/**
 * Consumes the next cache entry and deserializes it into a typed key/value pair.
 *
 * @param nextCacheKey the key previously peeked from the cache iterator
 * @return the deserialized key/value pair
 * @throws IllegalStateException if the consumed entry's key differs from the
 *         peeked key — an invariant violation that should never occur
 */
private KeyValue<K, V> nextCacheValue(final Bytes nextCacheKey) {
    final KeyValue<Bytes, LRUCacheEntry> entry = cacheIterator.next();
    // Sanity check: the entry we just consumed must match what was peeked.
    if (!entry.key.equals(nextCacheKey)) {
        throw new IllegalStateException("Next record key is not the peeked key value; this should not happen");
    }
    final K key = deserializeCacheKey(entry.key);
    final V value = deserializeCacheValue(entry.value);
    return KeyValue.pair(key, value);
}
/**
 * Encodes a store key in the binary layout:
 * [serialized key bytes][8-byte timestamp][4-byte sequence number].
 *
 * @param serializedKey the serialized record key
 * @param timestamp     the record timestamp, appended big-endian
 * @param seqnum        the sequence number, appended big-endian
 * @return the composite binary store key
 */
static Bytes toStoreKeyBinary(final byte[] serializedKey, final long timestamp, final int seqnum) {
    final ByteBuffer composite = ByteBuffer
        .allocate(serializedKey.length + TIMESTAMP_SIZE + SEQNUM_SIZE)
        .put(serializedKey)
        .putLong(timestamp)
        .putInt(seqnum);
    return Bytes.wrap(composite.array());
}
@Override public int compareTo(final BufferKey o) { // ordering of keys within a time uses hashCode. final int timeComparison = Long.compare(time, o.time); return timeComparison == 0 ? key.compareTo(o.key) : timeComparison; } }
/**
 * Probes whether the record's value is Avro-decodable; if so, supplies a
 * formatter for the topic.
 *
 * @return a formatter when a trial deserialization succeeds, otherwise empty
 */
@Override
public Optional<Formatter> maybeGetFormatter(
    final String topicName,
    final ConsumerRecord<String, Bytes> record,
    final KafkaAvroDeserializer avroDeserializer,
    final DateFormat dateFormat) {
    try {
        // Trial decode: success means the payload is Avro for this topic.
        avroDeserializer.deserialize(topicName, record.value().get());
        return Optional.of(createFormatter(topicName, avroDeserializer, dateFormat));
    } catch (final Throwable t) {
        // Deliberately broad: any failure simply means "not this format".
        return Optional.empty();
    }
}
/**
 * Returns the upper byte range for a key with a given fixed size maximum suffix
 *
 * Assumes the minimum key length is one byte
 *
 * The result is built by copying leading bytes of {@code key} while each byte
 * is at least as large (unsigned) as the first byte of {@code maxSuffix}, then
 * appending {@code maxSuffix}. Once a prefix byte compares below the suffix's
 * first byte, copying stops — appending the suffix at that point already yields
 * a byte string that upper-bounds every key sharing the prefix.
 */
static Bytes upperRange(final Bytes key, final byte[] maxSuffix) {
    final byte[] bytes = key.get();
    // Worst case: the whole prefix plus the whole suffix.
    final ByteBuffer rangeEnd = ByteBuffer.allocate(bytes.length + maxSuffix.length);
    int i = 0;
    // Copy prefix bytes while they dominate (unsigned) the suffix's first byte.
    // The & 0xFF masks force an unsigned comparison of the signed Java bytes.
    while (i < bytes.length && (
        i < MIN_KEY_LENGTH // assumes keys are at least one byte long
        || (bytes[i] & 0xFF) >= (maxSuffix[0] & 0xFF)
    )) {
        rangeEnd.put(bytes[i++]);
    }
    rangeEnd.put(maxSuffix);
    rangeEnd.flip();
    // Trim to the bytes actually written; the buffer may be under-filled
    // when the copy loop stopped early.
    final byte[] res = new byte[rangeEnd.remaining()];
    ByteBuffer.wrap(res).put(rangeEnd);
    return Bytes.wrap(res);
}
/**
 * Inserts every entry of the batch into the cache under synchronization,
 * wrapping each raw key in {@code Bytes}.
 *
 * @param entries key/value pairs to insert, in list order
 */
synchronized void putAll(final List<KeyValue<byte[], LRUCacheEntry>> entries) {
    entries.forEach(entry -> put(Bytes.wrap(entry.key), entry.value));
}
/**
 * Static factory wrapping a raw byte array in a {@code Bytes} instance.
 *
 * @param bytes the array to wrap, possibly null
 * @return a new wrapper, or null when {@code bytes} is null
 */
public static Bytes wrap(byte[] bytes) {
    return bytes == null ? null : new Bytes(bytes);
}
/**
 * Probes whether the record's value parses as JSON; if so, supplies a
 * formatter for the topic.
 *
 * @return a formatter when a trial JSON parse succeeds, otherwise empty
 */
@Override
public Optional<Formatter> maybeGetFormatter(
    final String topicName,
    final ConsumerRecord<String, Bytes> record,
    final KafkaAvroDeserializer avroDeserializer,
    final DateFormat dateFormat) {
    try {
        // Trial parse: success means the payload is valid JSON.
        JsonMapper.INSTANCE.mapper.readTree(record.value().toString());
        return Optional.of(createFormatter());
    } catch (final Throwable t) {
        // Deliberately broad: any failure simply means "not this format".
        return Optional.empty();
    }
}
/**
 * Advances past session-store entries whose key or window falls outside the
 * configured range, reporting whether a matching entry remains.
 *
 * @param iterator the underlying store iterator to filter
 * @return true when the next peeked entry's key lies within
 *         [binaryKeyFrom, binaryKeyTo] (null bound = unbounded) and its
 *         session window overlaps [from, to]
 */
@Override
public boolean hasNext(final KeyValueIterator<Bytes, ?> iterator) {
    while (iterator.hasNext()) {
        final Bytes rawKey = iterator.peekNextKey();
        final Windowed<Bytes> windowed = SessionKeySchema.from(rawKey);
        final Bytes sessionKey = windowed.key();
        final boolean aboveLower = binaryKeyFrom == null || sessionKey.compareTo(binaryKeyFrom) >= 0;
        final boolean belowUpper = binaryKeyTo == null || sessionKey.compareTo(binaryKeyTo) <= 0;
        // Window overlap test: session must end at/after `from` and start at/before `to`.
        if (aboveLower && belowUpper
            && windowed.window().end() >= from
            && windowed.window().start() <= to) {
            return true;
        }
        // Out of range — consume it and keep scanning.
        iterator.next();
    }
    return false;
}
};
// Unwrap the Bytes container into its backing byte[].
// NOTE(review): fragment of a larger method not visible here — the cast
// presumably follows an instanceof check upstream; confirm against the caller.
value = ((Bytes) value).get();