private static int setStreamDataToCache( LlapSerDeDataBuffer[][] newCacheDataForCol, CacheWriter.CacheStreamData stream) { int streamIx = stream.name.getKind().getNumber(); // This is kinda hacky - we "know" these are LlaSerDeDataBuffer-s. newCacheDataForCol[streamIx] = stream.data.toArray(new LlapSerDeDataBuffer[stream.data.size()]); return streamIx; }
ecb.setStreamData(ctx.colIx, sctx.kind.getNumber(), cb);
ecb.setStreamData(ctx.colIx, sctx.kind.getNumber(), cb); } catch (Exception ex) { DiskRangeList drl = toRead == null ? null : toRead.next;
/**
 * Computes (and memoizes) the serialized byte size of this message: optional
 * enum field 1 (kind), optional uint32 field 2 (column), optional uint64
 * field 3 (length), plus any unknown fields.
 *
 * @return the total serialized size in bytes
 */
public int getSerializedSize() {
  // Return the cached value if it was already computed.
  int cached = memoizedSerializedSize;
  if (cached != -1) {
    return cached;
  }
  int total = 0;
  // Field 1: kind (enum), present when bit 0 of the presence mask is set.
  if ((bitField0_ & 0x00000001) != 0) {
    total += com.google.protobuf.CodedOutputStream
        .computeEnumSize(1, kind_.getNumber());
  }
  // Field 2: column (uint32), presence bit 1.
  if ((bitField0_ & 0x00000002) != 0) {
    total += com.google.protobuf.CodedOutputStream
        .computeUInt32Size(2, column_);
  }
  // Field 3: length (uint64), presence bit 2.
  if ((bitField0_ & 0x00000004) != 0) {
    total += com.google.protobuf.CodedOutputStream
        .computeUInt64Size(3, length_);
  }
  total += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = total;
  return total;
}
/**
 * Serializes this message to the given protobuf output stream, writing only
 * the fields whose presence bits are set, followed by any unknown fields.
 *
 * @param output the protobuf stream to write to
 * @throws java.io.IOException if the underlying stream fails
 */
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // Make sure the memoized size is populated before serialization.
  getSerializedSize();
  // Field 1: kind (enum), presence bit 0.
  if ((bitField0_ & 0x00000001) != 0) {
    output.writeEnum(1, kind_.getNumber());
  }
  // Field 2: column (uint32), presence bit 1.
  if ((bitField0_ & 0x00000002) != 0) {
    output.writeUInt32(2, column_);
  }
  // Field 3: length (uint64), presence bit 2.
  if ((bitField0_ & 0x00000004) != 0) {
    output.writeUInt64(3, length_);
  }
  getUnknownFields().writeTo(output);
}
private static int setStreamDataToCache( LlapSerDeDataBuffer[][] newCacheDataForCol, CacheWriter.CacheStreamData stream) { int streamIx = stream.name.getKind().getNumber(); // This is kinda hacky - we "know" these are LlaSerDeDataBuffer-s. newCacheDataForCol[streamIx] = stream.data.toArray(new LlapSerDeDataBuffer[stream.data.size()]); return streamIx; }
/**
 * Hash combining the column index and the stream kind number.
 * The formula (column * 101 + kindNumber) is preserved exactly so hashes
 * remain stable across versions.
 */
@Override
public int hashCode() {
  final int prime = 101;
  int kindNumber = kind.getNumber();
  return prime * column + kindNumber;
}
}