@Override
public void initChannel(SocketChannel ch) throws Exception {
  ch.pipeline().addLast(
      new ProtobufVarint32FrameDecoder(),
      new ProtobufDecoder(LlapOutputSocketInitMessage.getDefaultInstance()),
      new StringEncoder(),
      new LlapOutputFormatServiceHandler(sendBufferSize));
}
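For context, a minimal sketch of how an initializer like this might be installed on a Netty ServerBootstrap; the event loop groups, channel class, and port wiring here are assumptions and do not appear in the snippet above:

import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;

public class OutputServiceBootstrapSketch {
  // Hypothetical wiring; the real service's bootstrap code is not shown in this section.
  public static ChannelFuture start(int port, ChannelInitializer<SocketChannel> initializer)
      throws InterruptedException {
    EventLoopGroup bossGroup = new NioEventLoopGroup(1);  // accepts incoming connections
    EventLoopGroup workerGroup = new NioEventLoopGroup(); // services channel I/O
    ServerBootstrap bootstrap = new ServerBootstrap()
        .group(bossGroup, workerGroup)
        .channel(NioServerSocketChannel.class)
        .childHandler(initializer); // e.g. the initChannel(...) initializer above
    return bootstrap.bind(port).sync();
  }
}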
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage other =
      (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage) obj;

  boolean result = true;
  result = result && (hasFragmentId() == other.hasFragmentId());
  if (hasFragmentId()) {
    result = result && getFragmentId().equals(other.getFragmentId());
  }
  result = result && (hasToken() == other.hasToken());
  if (hasToken()) {
    result = result && getToken().equals(other.getToken());
  }
  result = result && getUnknownFields().equals(other.getUnknownFields());
  return result;
}
LlapOutputSocketInitMessage.newBuilder()
    .setFragmentId(id).build().writeDelimitedTo(socketStream);
socketStream.flush();
OutputStream socketStream = socket.getOutputStream();
LlapOutputSocketInitMessage.Builder builder =
    LlapOutputSocketInitMessage.newBuilder().setFragmentId(fragmentId);
if (llapSplit.getTokenBytes() != null) {
  builder.setToken(ByteString.copyFrom(llapSplit.getTokenBytes()));
}
// Send the init message regardless of whether a token is present.
builder.build().writeDelimitedTo(socketStream);
socketStream.flush();
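For reference, writeDelimitedTo prefixes the serialized message with its length as a base-128 varint, which is the framing that ProtobufVarint32FrameDecoder strips on the server side. A minimal round trip, assuming only the generated LlapOutputSocketInitMessage class; the fragment id and token bytes are placeholders:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import com.google.protobuf.ByteString;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage;

public class InitMessageRoundTrip {
  public static void main(String[] args) throws Exception {
    // Serialize with a varint length prefix, as the client does over the socket.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    LlapOutputSocketInitMessage.newBuilder()
        .setFragmentId("fragment-0")                      // placeholder id
        .setToken(ByteString.copyFromUtf8("placeholder")) // placeholder token bytes
        .build()
        .writeDelimitedTo(out);

    // parseDelimitedFrom reads the same varint framing back.
    LlapOutputSocketInitMessage parsed = LlapOutputSocketInitMessage
        .parseDelimitedFrom(new ByteArrayInputStream(out.toByteArray()));
    System.out.println(parsed.getFragmentId() + " hasToken=" + parsed.hasToken());
  }
}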
@Test
public void testBadClientMessage() throws Exception {
  JobConf job = new JobConf();
  String id = "foobar";
  job.set(LlapOutputFormat.LLAP_OF_ID_KEY, id);
  LlapOutputFormat format = new LlapOutputFormat();

  Socket socket = new Socket("localhost", service.getPort());
  LOG.debug("Socket connected");

  // Sending the init message twice is the "bad" client behavior under test;
  // the service expects a single init message per connection.
  OutputStream socketStream = socket.getOutputStream();
  LlapOutputSocketInitMessage.newBuilder()
      .setFragmentId(id).build().writeDelimitedTo(socketStream);
  LlapOutputSocketInitMessage.newBuilder()
      .setFragmentId(id).build().writeDelimitedTo(socketStream);
  socketStream.flush();

  Thread.sleep(3000);
  LOG.debug("Data written");

  try {
    format.getRecordWriter(null, job, null, null);
    Assert.fail("Didn't throw");
  } catch (IOException ex) {
    // Expected.
  }
}
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(1, getFragmentIdBytes());
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(2, token_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasFragmentId()) {
    hash = (37 * hash) + FRAGMENT_ID_FIELD_NUMBER;
    hash = (53 * hash) + getFragmentId().hashCode();
  }
  if (hasToken()) {
    hash = (37 * hash) + TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getToken().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
@Override
public void channelRead0(ChannelHandlerContext ctx, LlapOutputSocketInitMessage msg) {
  String id = msg.getFragmentId();
  byte[] tokenBytes = msg.hasToken() ? msg.getToken().toByteArray() : null;
  try {
    registerReader(ctx, id, tokenBytes);
  } catch (Throwable t) {
    // Make sure we fail the channel if something goes wrong.
    // We internally handle all the "expected" exceptions, so log a lot of information here.
    failChannel(ctx, id, StringUtils.stringifyException(t));
  }
}
public Builder toBuilder() {
  return newBuilder(this);
}
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage buildPartial() {
  org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage result =
      new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  result.fragmentId_ = fragmentId_;
  if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
    to_bitField0_ |= 0x00000002;
  }
  result.token_ = token_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage prototype) {
  return newBuilder().mergeFrom(prototype);
}
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage getDefaultInstanceForType() {
  return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage.getDefaultInstance();
}
public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage other) {
  if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage.getDefaultInstance()) return this;
  if (other.hasFragmentId()) {
    bitField0_ |= 0x00000001;
    fragmentId_ = other.fragmentId_;
    onChanged();
  }
  if (other.hasToken()) {
    setToken(other.getToken());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
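A small illustration of these merge semantics, assuming only the generated message class and placeholder ids: every field set on the other message overwrites or fills in the corresponding builder field.

import com.google.protobuf.ByteString;
import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage;

public class MergeFromSketch {
  public static void main(String[] args) {
    LlapOutputSocketInitMessage withToken = LlapOutputSocketInitMessage.newBuilder()
        .setFragmentId("frag-2")                  // placeholder ids
        .setToken(ByteString.copyFromUtf8("tok"))
        .build();

    // mergeFrom copies every set field from 'other': fragment_id is
    // overwritten with "frag-2", and token is set because withToken has one.
    LlapOutputSocketInitMessage merged = LlapOutputSocketInitMessage.newBuilder()
        .setFragmentId("frag-1")
        .mergeFrom(withToken)
        .build();
    System.out.println(merged.getFragmentId() + " hasToken=" + merged.hasToken()); // frag-2 hasToken=true
  }
}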
/**
 * <code>optional bytes token = 2;</code>
 */
public Builder clearToken() {
  bitField0_ = (bitField0_ & ~0x00000002);
  token_ = getDefaultInstance().getToken();
  onChanged();
  return this;
}
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeBytes(1, getFragmentIdBytes());
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeBytes(2, token_);
  }
  getUnknownFields().writeTo(output);
}
/**
 * <code>required string fragment_id = 1;</code>
 */
public Builder clearFragmentId() {
  bitField0_ = (bitField0_ & ~0x00000001);
  fragmentId_ = getDefaultInstance().getFragmentId();
  onChanged();
  return this;
}
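Since the proto declares fragment_id as a required field, clearing it leaves the builder incomplete: build() would throw, while buildPartial() tolerates the missing field. A quick sketch with a placeholder id:

import org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage;

public class ClearRequiredFieldSketch {
  public static void main(String[] args) {
    LlapOutputSocketInitMessage.Builder b = LlapOutputSocketInitMessage.newBuilder()
        .setFragmentId("frag-0"); // placeholder id
    b.clearFragmentId();

    System.out.println(b.isInitialized()); // false: required fragment_id is unset
    // buildPartial() still succeeds despite the missing required field;
    // build() would throw UninitializedMessageException here.
    LlapOutputSocketInitMessage partial = b.buildPartial();
    System.out.println(partial.hasFragmentId()); // false
  }
}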