@Override
public void write(@Nonnull Block block,
    @Nonnull ProvidedStorageLocation providedStorageLocation)
    throws IOException {
  // Defensive re-check of the @Nonnull contract: surface a clear error to
  // callers instead of an NPE deeper inside the proto conversion.
  if (block == null || providedStorageLocation == null) {
    throw new IOException("Provided block and location cannot be null");
  }
  // Pack the (block -> storage location) pair into the RPC request message.
  KeyValueProto pair = KeyValueProto.newBuilder()
      .setKey(PBHelperClient.convert(block))
      .setValue(PBHelperClient.convert(providedStorageLocation))
      .build();
  WriteRequestProto request = WriteRequestProto.newBuilder()
      .setKeyValuePair(pair)
      .build();
  try {
    rpcProxy.write(null, request);
  } catch (ServiceException e) {
    // Unwrap the protobuf ServiceException into the remote IOException.
    throw ProtobufHelper.getRemoteException(e);
  }
}
/**
 * <code>required .hadoop.hdfs.KeyValueProto keyValuePair = 1;</code>
 *
 * Merges {@code value} into the current keyValuePair field following
 * protobuf merge semantics, marks the field as set, and returns this
 * builder for chaining.
 */
public Builder mergeKeyValuePair(org.apache.hadoop.hdfs.protocol.proto.AliasMapProtocolProtos.KeyValueProto value) {
  if (keyValuePairBuilder_ == null) {
    // No nested builder in use: operate on the message field directly.
    if (((bitField0_ & 0x00000001) == 0x00000001) &&
        keyValuePair_ != org.apache.hadoop.hdfs.protocol.proto.AliasMapProtocolProtos.KeyValueProto.getDefaultInstance()) {
      // Field already holds a non-default value: proto-merge the two
      // messages rather than overwrite.
      keyValuePair_ =
          org.apache.hadoop.hdfs.protocol.proto.AliasMapProtocolProtos.KeyValueProto.newBuilder(keyValuePair_).mergeFrom(value).buildPartial();
    } else {
      // Unset (or default) field: adopt the incoming message as-is.
      keyValuePair_ = value;
    }
    onChanged();
  } else {
    // A nested builder exists: delegate the merge to it.
    keyValuePairBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000001; // record that keyValuePair is now set
  return this;
}
/**
/** Returns a builder pre-populated from this message via {@code newBuilder(this)}. */
public Builder toBuilder() {
  Builder populated = newBuilder(this);
  return populated;
}
// Returns a fresh, empty builder for this message type.
public Builder newBuilderForType() { return newBuilder(); }
// NOTE(review): the declaration below is truncated in this chunk; its body
// continues outside the visible range — left byte-identical.
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.AliasMapProtocolProtos.KeyValueProto prototype) {
/**
 * Returns a new builder initialized by merging {@code prototype} into an
 * empty builder.
 */
public static Builder newBuilder(
    org.apache.hadoop.hdfs.protocol.proto.AliasMapProtocolProtos.KeyValueProto prototype) {
  Builder fresh = newBuilder();
  return fresh.mergeFrom(prototype);
}

/** Returns a builder carrying this message's current field values. */
public Builder toBuilder() {
  return newBuilder(this);
}
/**
 * Converts a {@code FileRegion} into its {@code KeyValueProto} form: the
 * region's block becomes the key and its provided storage location the
 * value, each translated through {@code PBHelperClient.convert}.
 */
public static KeyValueProto convert(FileRegion fileRegion) {
  KeyValueProto.Builder builder = KeyValueProto.newBuilder();
  builder.setKey(PBHelperClient.convert(fileRegion.getBlock()));
  builder.setValue(
      PBHelperClient.convert(fileRegion.getProvidedStorageLocation()));
  return builder.build();
}