@Override
public Integer deserialize(ByteBuffer in) {
    // Reads a fixed-width 4-byte unsigned value; boxing happens at the return.
    final int decoded = BytesUtil.readUnsigned(in, 4);
    return decoded;
}
private byte[] getByteArrayForShort(short v) {
    // Encode the short into a freshly allocated SIZEOF_SHORT-wide buffer.
    final byte[] encoded = new byte[Bytes.SIZEOF_SHORT];
    BytesUtil.writeUnsigned(v, encoded, 0, Bytes.SIZEOF_SHORT);
    return encoded;
}
public static String[] readAsciiStringArray(ByteBuffer in) {
    // Wire layout: vint element count, then each element as an ascii string.
    final int count = readVInt(in);
    final String[] result = new String[count];
    for (int idx = 0; idx < count; idx++) {
        result[idx] = readAsciiString(in);
    }
    return result;
}
public static void writeAsciiStringArray(String[] strs, ByteBuffer out) {
    // Wire layout: vint element count, then each element as an ascii string.
    writeVInt(strs.length, out);
    for (String str : strs) {
        writeAsciiString(str, out);
    }
}
@Override
public void serialize(CoprocessorProjector value, ByteBuffer out) {
    // Mask bytes first, then the group-by flag encoded as vint 1/0.
    BytesUtil.writeByteArray(value.groupByMask, out);
    final int groupByFlag = value.hasGroupby ? 1 : 0;
    BytesUtil.writeVInt(groupByFlag, out);
}
/**
 * Re-encodes each dictionary-id entry in {@code value} (ids from the old dictionary of the
 * measure's raw column) into the corresponding id of the merged dictionary, in place.
 * Values absent from the source dictionary map to the merged dictionary's null id.
 * Returns the same list instance with every {@link ByteArray} reset onto a shared buffer.
 */
@Override
public List<ByteArray> reEncodeDictionary(List<ByteArray> value, MeasureDesc measureDesc,
        Map<TblColRef, Dictionary<String>> oldDicts, Map<TblColRef, Dictionary<String>> newDicts) {
    TblColRef colRef = getRawColumn(measureDesc.getFunction());
    Dictionary<String> sourceDict = oldDicts.get(colRef);
    Dictionary<String> mergedDict = newDicts.get(colRef);

    // Hoisted: getSizeOfId() is loop-invariant; the original called it up to four times per element.
    final int sizeOfId = mergedDict.getSizeOfId();

    // One shared buffer holds all re-encoded ids back to back.
    byte[] newIdBuf = new byte[value.size() * sizeOfId];
    int bufOffset = 0;
    for (ByteArray c : value) {
        int oldId = BytesUtil.readUnsigned(c.array(), c.offset(), c.length());
        String v = sourceDict.getValueFromId(oldId);
        // A value missing from the source dictionary maps to the merged dictionary's null id.
        int newId = (v == null) ? mergedDict.nullId() : mergedDict.getIdFromValue(v);
        BytesUtil.writeUnsigned(newId, newIdBuf, bufOffset, sizeOfId);
        // Rebind this entry onto its slice of the shared buffer.
        c.reset(newIdBuf, bufOffset, sizeOfId);
        bufOffset += sizeOfId;
    }
    return value;
}
};
@Override
public CoprocessorProjector deserialize(ByteBuffer in) {
    // Mirror of serialize(): byte-array mask followed by a vint-encoded 1/0 flag.
    final byte[] groupByMask = BytesUtil.readByteArray(in);
    final boolean hasGroupBy = (BytesUtil.readVInt(in) == 1);
    return new CoprocessorProjector(groupByMask, hasGroupBy);
}
};
// NOTE(review): this chunk appears truncated/garbled — the loops opened below
// ("for (int i = 0; i < colTypesSize; ...)" and the nested colBlocks loop) are never
// closed within this view, and the inner loops re-declare "i", which would not compile
// as written. Intent (from the visible reads): deserialize a GTInfo whose code system
// was written by the CubeCodeSystem branch of the matching serialize() — code-system
// type name, trimmed code system, table name, column types, primary key, column
// blocks, row block size. Confirm the full method against the original source file.
@Override public GTInfo deserialize(ByteBuffer in) { IGTCodeSystem codeSystem = null; String codeSystemType = BytesUtil.readAsciiString(in); if (CubeCodeSystem.class.getCanonicalName().equals(codeSystemType)) { codeSystem = TrimmedCubeCodeSystem.serializer.deserialize(in); String newTableName = BytesUtil.readUTFString(in); int colTypesSize = BytesUtil.readVInt(in); DataType[] newColTypes = new DataType[colTypesSize]; for (int i = 0; i < colTypesSize; ++i) { ImmutableBitSet newPrimaryKey = ImmutableBitSet.serializer.deserialize(in); int colBlockSize = BytesUtil.readVInt(in); ImmutableBitSet[] newColBlocks = new ImmutableBitSet[colBlockSize]; for (int i = 0; i < colBlockSize; ++i) { int newRowBlockSize = BytesUtil.readVInt(in);
// NOTE(review): interior fragment of a larger deserialize routine — no method header is
// visible and the statement at the end of this chunk is cut off mid-call
// ("TupleExpressionSerializer.deserialize(BytesUtil.readByteArray(in),"). The visible
// reads decode scan ranges (start/end GTRecords plus fuzzy keys), a GT filter, a having
// filter, aggregation metric function names, pre-aggregation/cache/limit/timeout
// settings, and a tuple-expression map. Presumably this is a GTScanRequest-style
// deserializer — verify against the original file before editing.
int sRangesCount = BytesUtil.readVInt(in); for (int rangeIdx = 0; rangeIdx < sRangesCount; rangeIdx++) { GTRecord sPkStart = deserializeGTRecord(in, sInfo); GTRecord sPkEnd = deserializeGTRecord(in, sInfo); List<GTRecord> sFuzzyKeys = Lists.newArrayList(); int sFuzzyKeySize = BytesUtil.readVInt(in); for (int i = 0; i < sFuzzyKeySize; i++) { sFuzzyKeys.add(deserializeGTRecord(in, sInfo)); TupleFilter sGTFilter = GTUtil.deserializeGTFilter(BytesUtil.readByteArray(in), sInfo); sGTHavingFilter = TupleFilterSerializer.deserialize(BytesUtil.readByteArray(in), StringCodeSystem.INSTANCE); String[] sAggrMetricFuncs = BytesUtil.readAsciiStringArray(in); boolean sAllowPreAggr = (BytesUtil.readVInt(in) == 1); double sAggrCacheGB = in.getDouble(); StorageLimitLevel storageLimitLevel = StorageLimitLevel.valueOf(BytesUtil.readUTFString(in)); int storageScanRowNumThreshold = BytesUtil.readVInt(in); int storagePushDownLimit = BytesUtil.readVInt(in); long startTime = BytesUtil.readVLong(in); long timeout = BytesUtil.readVLong(in); String storageBehavior = BytesUtil.readUTFString(in); int nTupleExprs = BytesUtil.readVInt(in); Map<Integer, TupleExpression> sTupleExpressionMap = Maps.newHashMapWithExpectedSize(nTupleExprs); for (int i = 0; i < nTupleExprs; i++) { int sC = BytesUtil.readVInt(in); TupleExpression sTupleExpr = TupleExpressionSerializer.deserialize(BytesUtil.readByteArray(in),
// NOTE(review): this chunk appears truncated — the try block and the colTypes/colBlocks
// loops opened below are never closed within this view. Intent (from the visible writes):
// serialize a GTInfo — code-system class name plus trimmed CubeCodeSystem (or a generic
// code-system path for other implementations), then table name, column types, column
// blocks, and row block size; this is the counterpart of the deserialize() fragment that
// reads the same fields. Confirm the full method against the original source file.
@Override public void serialize(GTInfo value, ByteBuffer out) { if (value.codeSystem instanceof CubeCodeSystem) { BytesUtil.writeAsciiString(CubeCodeSystem.class.getCanonicalName(), out); TrimmedCubeCodeSystem trimmed = ((CubeCodeSystem) value.codeSystem).trimForCoprocessor(); TrimmedCubeCodeSystem.serializer.serialize(trimmed, out); } else if (value.codeSystem != null) { BytesUtil.writeAsciiString(value.codeSystem.getClass().getCanonicalName(), out); BytesSerializer<IGTCodeSystem> serializer = null; try { BytesUtil.writeUTFString(value.tableName, out); BytesUtil.writeVInt(value.colTypes.length, out); for (DataType dataType : value.colTypes) { DataType.serializer.serialize(dataType, out); BytesUtil.writeVInt(value.colBlocks.length, out); for (ImmutableBitSet x : value.colBlocks) { ImmutableBitSet.serializer.serialize(x, out); BytesUtil.writeVInt(value.rowBlockSize, out);
@Override
public void serialize(EndpointAggregators value, ByteBuffer out) {
    // Function names and data types as ascii string arrays.
    BytesUtil.writeAsciiStringArray(value.funcNames, out);
    BytesUtil.writeAsciiStringArray(value.dataTypes, out);
    // Metric infos: count first, then (type, refIndex, precision) per entry.
    BytesUtil.writeVInt(value.metricInfos.length, out);
    for (MetricInfo info : value.metricInfos) {
        BytesUtil.writeAsciiString(info.type.toString(), out);
        BytesUtil.writeVInt(info.refIndex, out);
        BytesUtil.writeVInt(info.precision, out);
    }
    // Trailing record-info digest as a length-prefixed byte array.
    BytesUtil.writeByteArray(TableRecordInfoDigest.serialize(value.tableRecordInfoDigest), out);
}
@Override
public EndpointAggregators deserialize(ByteBuffer in) {
    // Mirror of serialize(): names, types, metric infos, then the digest blob.
    final String[] funcNames = BytesUtil.readAsciiStringArray(in);
    final String[] dataTypes = BytesUtil.readAsciiStringArray(in);
    final int metricCount = BytesUtil.readVInt(in);
    final MetricInfo[] metricInfos = new MetricInfo[metricCount];
    for (int idx = 0; idx < metricCount; ++idx) {
        final MetricType type = MetricType.valueOf(BytesUtil.readAsciiString(in));
        final int refIndex = BytesUtil.readVInt(in);
        final int precision = BytesUtil.readVInt(in);
        metricInfos[idx] = new MetricInfo(type, refIndex, precision);
    }
    final byte[] digestBytes = BytesUtil.readByteArray(in);
    final TableRecordInfoDigest digest = TableRecordInfoDigest.deserialize(digestBytes);
    return new EndpointAggregators(funcNames, dataTypes, metricInfos, digest);
}
public byte[] getFirstValue() {
    // Walks the trie from the root, always descending to the first child and
    // concatenating each node's value part, until a node flagged end-of-value
    // (or a node with no child) is reached.
    final ByteArrayOutputStream firstValue = new ByteArrayOutputStream();
    int nodeOffset = headSize;
    for (;;) {
        final int valuePartLen = BytesUtil.readUnsigned(trieBytes, nodeOffset + firstByteOffset - 1, 1);
        firstValue.write(trieBytes, nodeOffset + firstByteOffset, valuePartLen);
        if (checkFlag(nodeOffset, BIT_IS_END_OF_VALUE)) {
            break;
        }
        // Child offsets are stored relative to headSize; mask off flag bits first.
        nodeOffset = headSize + (int) (BytesUtil.readLong(trieBytes, nodeOffset, sizeChildOffset) & childOffsetMask);
        if (nodeOffset == headSize) {
            // Stored offset was zero: no child to follow.
            break;
        }
    }
    return firstValue.toByteArray();
}
@Override
public void serialize(List<ByteArray> values, ByteBuffer out) {
    // A null list is encoded exactly like an empty one: just a zero count.
    if (values == null) {
        BytesUtil.writeVInt(0, out);
        return;
    }
    BytesUtil.writeVInt(values.size(), out);
    for (ByteArray entry : values) {
        // Best-effort capacity guard before each length-prefixed write.
        final boolean bufferExhausted = !out.hasRemaining();
        final boolean entryTooLarge = out.remaining() < entry.length();
        if (bufferExhausted || entryTooLarge) {
            throw new RuntimeException(
                    "BufferOverflow! Please use one higher cardinality column for dimension column when build RAW cube!");
        }
        BytesUtil.writeByteArray(
                BytesUtil.subarray(entry.array(), entry.offset(), entry.offset() + entry.length()), out);
    }
}
private int getChildOffset(int n) {
    // Child offsets are stored relative to headSize; mask off flag bits before rebasing.
    final long absoluteOffset = headSize + (BytesUtil.readLong(trieBytes, n, sizeChildOffset) & childOffsetMask);
    assert absoluteOffset < trieBytes.length;
    return (int) absoluteOffset;
}
@Override
public void serialize(Long value, ByteBuffer out) {
    // Unbox explicitly (still NPEs on null, same as implicit unboxing) and
    // delegate to the shared fixed-width long writer.
    final long raw = value;
    BytesUtil.writeLong(raw, out);
}
@Override
public void serialize(TableRecordInfoDigest value, ByteBuffer out) {
    // Scalar header fields.
    BytesUtil.writeVInt(value.nColumns, out);
    BytesUtil.writeVInt(value.byteFormLen, out);
    // Per-column metadata arrays.
    BytesUtil.writeIntArray(value.offsets, out);
    BytesUtil.writeIntArray(value.dictMaxIds, out);
    BytesUtil.writeIntArray(value.lengths, out);
    BytesUtil.writeBooleanArray(value.isMetric, out);
    // One data-type name per metric column; non-metric columns are written as null.
    for (int i = 0; i < value.measureSerializers.length; ++i) {
        final String typeName = value.isMetrics(i)
                ? value.measureSerializers[i].getDataType().toString()
                : null;
        BytesUtil.writeAsciiString(typeName, out);
    }
}
@Override
public TableRecordInfoDigest deserialize(ByteBuffer in) {
    // Mirror of serialize(): scalar header, metadata arrays, then one type name per column.
    final TableRecordInfoDigest digest = new TableRecordInfoDigest();
    digest.nColumns = BytesUtil.readVInt(in);
    digest.byteFormLen = BytesUtil.readVInt(in);
    digest.offsets = BytesUtil.readIntArray(in);
    digest.dictMaxIds = BytesUtil.readIntArray(in);
    digest.lengths = BytesUtil.readIntArray(in);
    digest.isMetric = BytesUtil.readBooleanArray(in);
    digest.measureSerializers = new FixedLenMeasureCodec<?>[digest.nColumns];
    for (int col = 0; col < digest.nColumns; ++col) {
        final String typeStr = BytesUtil.readAsciiString(in);
        // A null type name marks a non-metric column with no measure codec.
        digest.measureSerializers[col] = (typeStr == null)
                ? null
                : FixedLenMeasureCodec.get(DataType.getInstance(typeStr));
    }
    return digest;
}