private static byte[] ensureSize(byte[] buff, int len) {
    return buff == null || buff.length < len ? Utils.newBytes(len) : buff;
}
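A minimal usage sketch, assuming a caller that keeps one scratch buffer across many reads (the field readBuffer and method readRecord are hypothetical): ensureSize reallocates only when the current buffer is too small, so repeated reads of similar size avoid per-call allocation.

// Hypothetical caller: reuse one scratch buffer across reads.
private byte[] readBuffer;

private void readRecord(DataInputStream in, int len) throws IOException {
    readBuffer = ensureSize(readBuffer, len); // reallocate only if too small
    in.readFully(readBuffer, 0, len);         // only the first len bytes are valid
}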
/**
 * Read a byte array.
 *
 * @return the value
 */
public byte[] readBytes() throws IOException {
    int len = readInt();
    if (len == -1) {
        return null;
    }
    byte[] b = Utils.newBytes(len);
    in.readFully(b);
    return b;
}
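The reader implies a wire format of a 4-byte length prefix, with -1 standing for a null array. A sketch of the matching write side under that assumption (writeInt and out are assumed to mirror readInt and in):

// Hypothetical counterpart, assuming the same length-prefixed format.
public void writeBytes(byte[] b) throws IOException {
    if (b == null) {
        writeInt(-1);        // length -1 marks a null array
    } else {
        writeInt(b.length);  // length prefix
        out.write(b);        // raw payload
    }
}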
@Override
public Object read(ByteBuffer buff, int tag) {
    int len = DataUtils.readVarInt(buff);
    byte[] data = Utils.newBytes(len);
    int size = data.length * 2;
    // adjust the average size
    // using an exponential moving average
    averageSize = (size + 15 * averageSize) / 16;
    buff.get(data);
    return deserialize(data);
}
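The averaging line is an exponential moving average with smoothing factor 1/16: each new observation contributes one sixteenth of its value, and older observations decay geometrically. A tiny self-contained illustration (the constant 160 is an arbitrary example input):

// newAvg = (size + 15 * oldAvg) / 16, i.e. alpha = 1/16.
int averageSize = 0;
for (int i = 0; i < 100; i++) {
    int size = 160; // arbitrary constant observation
    averageSize = (size + 15 * averageSize) / 16;
    // successive values: 10, 19, 27, 35, ... climbing toward 160
    // (integer truncation leaves it slightly short of 160)
}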
@Override
public Object read(ByteBuffer buff, int tag) {
    switch (tag) {
    case TAG_BIG_INTEGER_0:
        return BigInteger.ZERO;
    case TAG_BIG_INTEGER_1:
        return BigInteger.ONE;
    case TAG_BIG_INTEGER_SMALL:
        return BigInteger.valueOf(DataUtils.readVarLong(buff));
    }
    int len = DataUtils.readVarInt(buff);
    byte[] bytes = Utils.newBytes(len);
    buff.get(bytes);
    return new BigInteger(bytes);
}
@Override
public Object read(ByteBuffer buff, int tag) {
    switch (tag) {
    case TAG_BIG_DECIMAL_0:
        return BigDecimal.ZERO;
    case TAG_BIG_DECIMAL_1:
        return BigDecimal.ONE;
    case TAG_BIG_DECIMAL_SMALL:
        return BigDecimal.valueOf(DataUtils.readVarLong(buff));
    case TAG_BIG_DECIMAL_SMALL_SCALED:
        int scale = DataUtils.readVarInt(buff);
        return BigDecimal.valueOf(DataUtils.readVarLong(buff), scale);
    }
    int scale = DataUtils.readVarInt(buff);
    int len = DataUtils.readVarInt(buff);
    byte[] bytes = Utils.newBytes(len);
    buff.get(bytes);
    BigInteger b = new BigInteger(bytes);
    return new BigDecimal(b, scale);
}
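The general case stores a BigDecimal as its unscaled BigInteger plus a scale, which is exactly what the JDK exposes. A small round-trip illustration: 123.45 is unscaled value 12345 with scale 2.

// Round-trip sketch using only java.math APIs.
BigDecimal d = new BigDecimal("123.45");
byte[] bytes = d.unscaledValue().toByteArray(); // what the reader above consumes
int scale = d.scale();                          // 2
BigDecimal restored = new BigDecimal(new BigInteger(bytes), scale);
assert restored.equals(d);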
byte[] data;
int len = tag - TAG_BYTE_ARRAY_0_15;
data = Utils.newBytes(len);
buff.get(data);
return data;
} else if (text) {
    byte[] data = Utils.newBytes(paramLen);
    readFully(data);
    String str = new String(data, getEncoding());
    // ...
    break;
case PgServer.PG_TYPE_BYTEA:
    byte[] d1 = Utils.newBytes(paramLen);
    readFully(d1);
    prep.setBytes(col, d1);
    break;
default:
    server.trace("Binary format for type: " + pgType + " is unsupported");
    byte[] d2 = Utils.newBytes(paramLen);
    readFully(d2);
    prep.setString(col, new String(d2, getEncoding()));
byte[] bytes = Utils.newBytes(len);
for (int pos = 0; pos < len;) {
    int n = input.read(bytes, pos, len - pos);
    if (n < 0) {
        throw new EOFException(); // read() returns -1 at end of stream; guard against an endless loop
    }
    pos += n;
}
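The same fill-the-buffer loop is available in the JDK; a sketch using java.io.DataInputStream, whose readFully performs this loop internally and throws EOFException if the stream ends early:

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;

// Equivalent to the manual loop above, using the JDK's readFully.
static byte[] readExactly(InputStream input, int len) throws IOException {
    byte[] bytes = new byte[len];
    new DataInputStream(input).readFully(bytes, 0, len);
    return bytes;
}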
int lenAdd = DataUtils.readVarInt(chunk);
int compLen = pageSize + start - chunk.position();
byte[] comp = Utils.newBytes(compLen);
chunk.get(comp);
int l = compLen + lenAdd;
/**
 * Expands the compressed data.
 *
 * @param in the byte array with the compressed data
 * @return the uncompressed data
 */
public byte[] expand(byte[] in) {
    int algorithm = in[0];
    Compressor compress = getCompressor(algorithm);
    try {
        int len = readVariableInt(in, 1);
        int start = 1 + getVariableIntLength(len);
        byte[] buff = Utils.newBytes(len);
        compress.expand(in, start, in.length - start, buff, 0, len);
        return buff;
    } catch (Exception e) {
        throw DbException.get(ErrorCode.COMPRESSION_ERROR, e);
    }
}
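expand() fixes the buffer layout: byte 0 is the algorithm id, followed by a variable-length int holding the uncompressed size, then the compressed payload. A hypothetical write side under that assumption; writeVariableInt, algorithm, uncompressedLen, and maxCompressedLen are assumptions here, with writeVariableInt taken to be the inverse of readVariableInt and to return the number of bytes written.

// Hypothetical header writer matching what expand() parses above.
byte[] out = Utils.newBytes(1 + getVariableIntLength(uncompressedLen) + maxCompressedLen);
out[0] = (byte) algorithm;                            // algorithm id
int start = 1 + writeVariableInt(out, 1, uncompressedLen); // uncompressed size
// the compressed payload then goes at out[start..]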
byte[] comp = Utils.newBytes(compLen);
buff.get(comp);
int l = compLen + lenAdd;
    len = buff.length;
} else {
    buff = Utils.newBytes(len);
    len = IOUtils.readFully(in, buff, len);
int scale = readVarInt(buff);
int len = readVarInt(buff);
byte[] buff2 = Utils.newBytes(len);
buff.get(buff2, 0, len);
BigInteger b = new BigInteger(buff2);
// ...
byte[] b = Utils.newBytes(len);
buff.get(b, 0, len);
return ValueBytes.getNoCopy(b);
// ...
byte[] b = Utils.newBytes(len);
buff.get(b, 0, len);
return ValueJavaObject.getNoCopy(null, b, handler);
// ...
int smallLen = readVarInt(buff);
if (smallLen >= 0) {
    byte[] small = Utils.newBytes(smallLen);
    buff.get(small, 0, smallLen);
    return ValueLobDb.createSmallLob(type, small);
// ...
byte[] b = Utils.newBytes(len);
buff.get(b, 0, len);
return ValueGeometry.get(b);
// ...
int customType = readVarInt(buff);
int len = readVarInt(buff);
byte[] b = Utils.newBytes(len);
buff.get(b, 0, len);
return JdbcUtils.customDataTypesHandler.convert(
/**
 * Store the lob data to a file if the size of the buffer is larger than the
 * maximum size for an in-place lob.
 *
 * @param h the data handler
 */
public void convertToFileIfRequired(DataHandler h) {
    try {
        if (small != null && small.length > h.getMaxLengthInplaceLob()) {
            boolean compress = h.getLobCompressionAlgorithm(type) != null;
            int len = getBufferSize(h, compress, Long.MAX_VALUE);
            int tabId = tableId;
            if (type == Value.BLOB) {
                createFromStream(
                        Utils.newBytes(len), 0, getInputStream(), Long.MAX_VALUE, h);
            } else {
                createFromReader(
                        new char[len], 0, getReader(), Long.MAX_VALUE, h);
            }
            Value v2 = copy(h, tabId);
            if (SysProperties.CHECK && v2 != this) {
                DbException.throwInternalError(v2.toString());
            }
        }
    } catch (IOException e) {
        throw DbException.convertIOException(e, null);
    }
}
byte[] data = Utils.newBytes(len);
dataInRaw.readFully(data, 0, len);
dataIn = new DataInputStream(new ByteArrayInputStream(data, 0, len));
if (compress != null) {
    int uncompressed = page.readInt();
    byte[] buff = Utils.newBytes(remainingInBuffer);
    page.read(buff, 0, remainingInBuffer);
    page.reset();
@Override
public AlleleProperties read(ByteBuffer buff) {
    int len = DataUtils.readVarInt(buff);
    byte[] data = Utils.newBytes(len);
    buff.get(data);
    try {
        return AlleleProperties.parseFrom(data);
    } catch (InvalidProtocolBufferException e) {
        throw new InvalidAlleleProtoException(e);
    }
}
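A sketch of the matching write side under the same varint-length-prefix convention; WriteBuffer is assumed to be MVStore's org.h2.mvstore.WriteBuffer, and the method signature is an assumption mirroring the reader above.

// Hypothetical write counterpart: protobuf bytes behind a varint length prefix.
public void write(WriteBuffer buff, AlleleProperties obj) {
    byte[] data = obj.toByteArray();  // protobuf serialization
    buff.putVarInt(data.length)       // varint length prefix
        .put(data);                   // raw payload
}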