@Nullable
@Override
public byte[] convert(Binary source) {
    // A BSON Binary is a thin wrapper around a byte array; return the wrapped payload.
    final byte[] payload = source.getData();
    return payload;
} }
public Builder initialTimestamp(Binary initialTimestamp) {
    // Wrap the raw GTID bytes in a Timestamp.GTID; no wall-clock time is known yet, hence null.
    final byte[] gtidBytes = initialTimestamp.getData();
    this.initialTimestamp = new Timestamp.GTID(gtidBytes, null);
    return this;
}
/**
 * Deserializes a BSON {@code Binary} or raw {@code byte[]} back into an object.
 *
 * @param data   the data to read; must be either an {@code org.bson.types.Binary}
 *               or a {@code byte[]}
 * @param zipped true if the payload is GZIP-compressed
 * @return the deserialized object
 * @throws IOException            thrown when an error is encountered reading the data
 * @throws ClassNotFoundException thrown if the Class definition can not be found
 */
public static Object deserialize(final Object data, final boolean zipped)
        throws IOException, ClassNotFoundException {
    // Accept either the driver-level Binary wrapper or a plain byte array.
    final byte[] bytes = (data instanceof Binary)
            ? ((Binary) data).getData()
            : (byte[]) data;

    InputStream in = new ByteArrayInputStream(bytes);
    if (zipped) {
        in = new GZIPInputStream(in);
    }
    // SECURITY NOTE(review): Java native deserialization of untrusted bytes is dangerous;
    // callers must ensure 'data' comes from a trusted source (or install an ObjectInputFilter).
    // try-with-resources closes the ObjectInputStream AND the wrapped streams, even on error.
    try (ObjectInputStream ois = new ObjectInputStream(in)) {
        return ois.readObject();
    }
}
/**
 * Fills the map with the binary values from the given document.
 *
 * @param resultMap the map to fill
 * @param obj       the document to copy values from
 */
protected void fillMap(Map<String, ByteIterator> resultMap, Document obj) {
    for (Map.Entry<String, Object> entry : obj.entrySet()) {
        Object value = entry.getValue();
        // Only Binary-typed fields are copied; other value types are ignored.
        if (value instanceof Binary) {
            byte[] raw = ((Binary) value).getData();
            resultMap.put(entry.getKey(), new ByteArrayByteIterator(raw));
        }
    }
} }
@Override
public void serialize(final Object obj, final StringBuilder buf) {
    // Downcast to Binary and delegate to the (data, subtype, buffer) overload.
    final Binary binary = (Binary) obj;
    final byte[] data = binary.getData();
    serialize(data, binary.getType(), buf);
}
@Override
public void encode(final BsonWriter writer, final Binary value, final EncoderContext encoderContext) {
    // Re-wrap the driver-level Binary as a BsonBinary, preserving the subtype byte.
    final BsonBinary bsonBinary = new BsonBinary(value.getType(), value.getData());
    writer.writeBinaryData(bsonBinary);
}
// NOTE(review): fragment of a larger conditional — the enclosing method is not visible here.
// Builds a Timestamp.GTID from either a BSON Binary id or a raw byte[] id, with the given Date.
result = new Timestamp.GTID(((Binary) id).getData(), (Date) timestamp); } else if (id instanceof byte[]) { result = new Timestamp.GTID((byte[]) id, (Date) timestamp);
/**
 * Encodes a Binary field under the given name.
 *
 * @param name   the field name
 * @param binary the value
 * @see org.bson.BsonType#BINARY
 */
protected void putBinary(final String name, final Binary binary) {
    putName(name);
    // Convert to the writer-level representation, keeping the binary subtype.
    bsonWriter.writeBinaryData(new BsonBinary(binary.getType(), binary.getData()));
}
private void writeSlice(BlockBuilder output, Type type, Object value) {
    // VARCHAR is matched by base name so parameterized varchar types are covered too.
    if (type.getTypeSignature().getBase().equals(StandardTypes.VARCHAR)) {
        type.writeSlice(output, utf8Slice(toVarcharValue(value)));
        return;
    }
    if (type.equals(OBJECT_ID)) {
        type.writeSlice(output, wrappedBuffer(((ObjectId) value).toByteArray()));
        return;
    }
    if (type.equals(VARBINARY)) {
        // Only BSON Binary payloads can be written as VARBINARY; anything else becomes NULL.
        if (value instanceof Binary) {
            type.writeSlice(output, wrappedBuffer(((Binary) value).getData()));
        }
        else {
            output.appendNull();
        }
        return;
    }
    throw new PrestoException(GENERIC_INTERNAL_ERROR, "Unhandled type for Slice: " + type.getTypeSignature());
}
/**
 * Validates a GridFS chunk document and returns its payload bytes.
 *
 * <p>Checks that the chunk exists, carries the expected index ("n"), stores its
 * payload as a BSON Binary, and has exactly the length implied by the file's
 * declared {@code length}, {@code chunkSizeInBytes} and {@code numberOfChunks}.
 *
 * @param chunk              the chunk document, or null if none was found
 * @param expectedChunkIndex the zero-based index this chunk must have
 * @return the chunk's raw data bytes
 * @throws MongoGridFSException if the chunk is missing, malformed, or mis-sized
 */
private byte[] getBufferFromChunk(@Nullable final Document chunk, final int expectedChunkIndex) {
    // Chunks must be read in order; "n" is the zero-based chunk index.
    if (chunk == null || chunk.getInteger("n") != expectedChunkIndex) {
        throw new MongoGridFSException(format("Could not find file chunk for file_id: %s at chunk index %s.",
                fileId, expectedChunkIndex));
    }

    if (!(chunk.get("data") instanceof Binary)) {
        throw new MongoGridFSException("Unexpected data format for the chunk");
    }
    byte[] data = chunk.get("data", Binary.class).getData();

    long expectedDataLength = 0;
    boolean extraChunk = false;
    if (expectedChunkIndex + 1 > numberOfChunks) {
        // A chunk past the declared count: tolerated only if it carries no data beyond the expectation (0).
        extraChunk = true;
    } else if (expectedChunkIndex + 1 == numberOfChunks) {
        // Final chunk: holds whatever remains after all full-sized chunks (long math avoids int overflow).
        expectedDataLength = length - (expectedChunkIndex * (long) chunkSizeInBytes);
    } else {
        expectedDataLength = chunkSizeInBytes;
    }

    if (extraChunk && data.length > expectedDataLength) {
        // FIX: added the missing space between the two concatenated sentences of the message.
        throw new MongoGridFSException(format("Extra chunk data for file_id: %s. Unexpected chunk at chunk index %s. "
                + "The size was %s and it should be %s bytes.", fileId, expectedChunkIndex, data.length, expectedDataLength));
    } else if (data.length != expectedDataLength) {
        throw new MongoGridFSException(format("Chunk size data length is not the expected size. "
                + "The size was %s for file_id: %s chunk index %s it should be %s bytes.",
                data.length, fileId, expectedChunkIndex, expectedDataLength));
    }
    return data;
}
@Nullable
@Override
public byte[] convert(Binary source) {
    // Unwrap the BSON Binary and expose its raw byte array.
    return source.getData();
} }
@Override
public Message<?> convert(Binary source) {
    // Deserialize the stored byte[] payload back into the original Message.
    final byte[] raw = source.getData();
    final Object restored = this.deserializingConverter.convert(raw);
    return (Message<?>) restored;
}
@Override
public Object convert(Object source, TypeDescriptor sourceType, TypeDescriptor targetType) {
    if (source == null) {
        return null;
    }
    // Messages are serialized into a BSON Binary on the way in; everything else is
    // assumed to be a Binary that must be deserialized back into an object.
    if (Message.class.isAssignableFrom(sourceType.getObjectType())) {
        return new Binary(this.serializingConverter.convert(source));
    }
    return this.deserializingConverter.convert(((Binary) source).getData());
}
@Override public ErrorMessage convert(Document source) { @SuppressWarnings("unchecked") Map<String, Object> headers = MongoDbMessageStore.this.converter.normalizeHeaders((Map<String, Object>) source.get("headers")); Object payload = this.deserializingConverter.convert(((Binary) source.get("payload")).getData()); ErrorMessage message = new ErrorMessage((Throwable) payload, headers); // NOSONAR not null enhanceHeaders(message.getHeaders(), headers); return message; }
private int determineBinaryParameterIndex(Binary value) {
    // NOTE(review): relies on byte 7 of the payload encoding the parameter index,
    // via the fixed (b + 48) / 4 mapping — confirm against the fixture data that populates it.
    final byte seventhByte = value.getData()[6];
    return (seventhByte + 48) / 4;
}
@Override
public void serialize(final Object obj, final StringBuilder buf) {
    // Delegate to the byte[]-based overload, carrying the BSON binary subtype along.
    final Binary bin = (Binary) obj;
    serialize(bin.getData(), bin.getType(), buf);
}
@Override
public X extract(Tuple resultset, String name) {
    final Binary binary = (Binary) resultset.get( name );
    if ( binary == null ) {
        return null;
    }
    // Let the Java type descriptor adapt the raw bytes to the target type.
    return javaTypeDescriptor.wrap( binary.getData(), null );
}
};
@Override
public X extract(Tuple resultset, String name) {
    final Binary result = (Binary) resultset.get( name );
    // A missing/null column yields null; otherwise wrap the payload via the type descriptor.
    return result == null ? null : javaTypeDescriptor.wrap( result.getData(), null );
}
};
/** * Initializes a Saga entry using a DBObject containing the Mongo Document * * @param dbSaga The mongo Document containing the serialized saga */ public SagaEntry(Document dbSaga) { this.sagaId = (String) dbSaga.get(SAGA_IDENTIFIER); this.serializedSaga = ((Binary) dbSaga.get(SERIALIZED_SAGA)).getData(); this.sagaType = (String) dbSaga.get(SAGA_TYPE); this.associationValues = toAssociationSet(dbSaga); }
@Override public ErrorMessage convert(Document source) { @SuppressWarnings("unchecked") Map<String, Object> headers = MongoDbMessageStore.this.converter.normalizeHeaders((Map<String, Object>) source.get("headers")); Object payload = this.deserializingConverter.convert(((Binary) source.get("payload")).getData()); ErrorMessage message = new ErrorMessage((Throwable) payload, headers); // NOSONAR not null enhanceHeaders(message.getHeaders(), headers); return message; }