Codota Logo
DataTransferProtos$BaseHeaderProto
Code IndexAdd Codota to your IDE (free)

How to use
DataTransferProtos$BaseHeaderProto
in
org.apache.hadoop.hdfs.protocol.proto

Best Java code snippets using org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos$BaseHeaderProto (Showing top 20 results out of 315)

  • Common ways to obtain DataTransferProtos$BaseHeaderProto
private void myMethod () {
DataTransferProtos$BaseHeaderProto d =
  • Codota IconDataTransferProtos.BaseHeaderProto$Builder dataTransferProtosBaseHeaderProto$Builder;dataTransferProtosBaseHeaderProto$Builder.build()
  • Codota IconDataTransferProtos.BaseHeaderProto$Builder dataTransferProtosBaseHeaderProto$Builder;dataTransferProtosBaseHeaderProto$Builder.buildPartial()
  • Codota IconObject object;(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto) object
  • Smart code suggestions by Codota
}
origin: org.apache.hadoop/hadoop-hdfs

  proto.getClass().getSimpleName());
try {
 writeBlock(PBHelperClient.convert(proto.getHeader().getBaseHeader().getBlock()),
   PBHelperClient.convertStorageType(proto.getStorageType()),
   PBHelperClient.convert(proto.getHeader().getBaseHeader().getToken()),
   proto.getHeader().getClientName(),
   targets,
origin: apache/hbase

blockCopy.setNumBytes(locatedBlock.getBlockSize());
ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder()
  .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PB_HELPER.convert(blockCopy))
    .setToken(PB_HELPER.convert(locatedBlock.getBlockToken())))
  .setClientName(clientName).build();
origin: org.apache.hadoop/hadoop-hdfs-client

public org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto buildPartial() {
 org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto result = new org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto(this);
 int from_bitField0_ = bitField0_;
 int to_bitField0_ = 0;
origin: io.prestosql.hadoop/hadoop-apache

/**
 * Continues an upstream trace span using the trace info embedded in a
 * data-transfer request's base header.  Thin convenience overload that
 * delegates to the {@code DataTransferTraceInfoProto} variant.
 *
 * @param header base header of the request; its (optional) traceInfo field
 *   carries the caller's trace/span ids
 * @param description human-readable description for the continued span
 * @return the opened trace scope — NOTE(review): callers null-check the
 *   result, so the delegate presumably returns null when no trace info is
 *   present; confirm against the overload's implementation.
 */
public static TraceScope continueTraceSpan(BaseHeaderProto header,
  String description) {
 return continueTraceSpan(header.getTraceInfo(), description);
}
origin: io.prestosql.hadoop/hadoop-apache

/**
 * Builds the {@code BaseHeaderProto} common to all data-transfer operations:
 * the target block, its access token, and — only when tracing is active —
 * the current trace/span ids so the receiver can continue the span.
 *
 * @param blk block the operation applies to
 * @param blockToken access token authorizing the operation on {@code blk}
 * @return the assembled base header message
 */
static BaseHeaderProto buildBaseHeader(ExtendedBlock blk,
  Token<BlockTokenIdentifier> blockToken) {
 BaseHeaderProto.Builder header = BaseHeaderProto.newBuilder();
 header.setBlock(PBHelper.convert(blk));
 header.setToken(PBHelper.convert(blockToken));
 if (Trace.isTracing()) {
  // Propagate the active span so the remote side can link its work to ours.
  Span current = Trace.currentSpan();
  DataTransferTraceInfoProto.Builder trace =
    DataTransferTraceInfoProto.newBuilder();
  trace.setTraceId(current.getTraceId());
  trace.setParentId(current.getSpanId());
  header.setTraceInfo(trace);
 }
 return header.build();
}
origin: ch.cern.hadoop/hadoop-hdfs

/**
 * Receive OP_REPLACE_BLOCK: decode the varint-length-prefixed request and
 * dispatch to {@code replaceBlock}, continuing the caller's trace span for
 * the duration of the operation.
 */
private void opReplaceBlock(DataInputStream in) throws IOException {
 final OpReplaceBlockProto request =
   OpReplaceBlockProto.parseFrom(vintPrefixed(in));
 TraceScope scope = continueTraceSpan(request.getHeader(),
   request.getClass().getSimpleName());
 try {
  replaceBlock(PBHelper.convert(request.getHeader().getBlock()),
    PBHelper.convertStorageType(request.getStorageType()),
    PBHelper.convert(request.getHeader().getToken()),
    request.getDelHint(),
    PBHelper.convert(request.getSource()));
 } finally {
  // Scope may be null when the request carried no trace info.
  if (scope != null) {
   scope.close();
  }
 }
}
origin: io.prestosql.hadoop/hadoop-apache

/**
 * Receive OP_REPLACE_BLOCK: parse the on-wire request, then hand the
 * decoded block, storage type, token, delete-hint, and source datanode to
 * {@code replaceBlock}.  The remote trace span is continued around the call.
 */
private void opReplaceBlock(DataInputStream in) throws IOException {
 final OpReplaceBlockProto request =
   OpReplaceBlockProto.parseFrom(vintPrefixed(in));
 TraceScope scope = continueTraceSpan(request.getHeader(),
   request.getClass().getSimpleName());
 try {
  replaceBlock(PBHelper.convert(request.getHeader().getBlock()),
    PBHelper.convertStorageType(request.getStorageType()),
    PBHelper.convert(request.getHeader().getToken()),
    request.getDelHint(),
    PBHelper.convert(request.getSource()));
 } finally {
  // No trace info in the request yields a null scope; nothing to close.
  if (scope != null) {
   scope.close();
  }
 }
}
origin: ch.cern.hadoop/hadoop-hdfs

// Generated by protoc — kept byte-identical to the generator's output.
// Seeds a fresh Builder with all fields copied from {@code prototype}.
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto prototype) {
 return newBuilder().mergeFrom(prototype);
}
// Returns a Builder pre-populated from this message instance.
public Builder toBuilder() { return newBuilder(this); }
origin: io.prestosql.hadoop/hadoop-apache

public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto prototype) {
origin: ch.cern.hadoop/hadoop-hdfs

public Builder toBuilder() { return newBuilder(this); }
origin: io.prestosql.hadoop/hadoop-apache

public Builder toBuilder() { return newBuilder(this); }
origin: org.apache.hadoop/hadoop-hdfs-client

public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto prototype) {
origin: ch.cern.hadoop/hadoop-hdfs

/**
 * Receive {@link Op#TRANSFER_BLOCK}: decode the request, resolve the target
 * datanodes and their storage types, and dispatch to {@code transferBlock}
 * inside the continued trace span.
 */
private void opTransferBlock(DataInputStream in) throws IOException {
 final OpTransferBlockProto request =
   OpTransferBlockProto.parseFrom(vintPrefixed(in));
 final DatanodeInfo[] targetNodes = PBHelper.convert(request.getTargetsList());
 TraceScope scope = continueTraceSpan(request.getHeader(),
   request.getClass().getSimpleName());
 try {
  transferBlock(
    PBHelper.convert(request.getHeader().getBaseHeader().getBlock()),
    PBHelper.convert(request.getHeader().getBaseHeader().getToken()),
    request.getHeader().getClientName(),
    targetNodes,
    PBHelper.convertStorageTypes(request.getTargetStorageTypesList(),
      targetNodes.length));
 } finally {
  // Scope may be null when the request carried no trace info.
  if (scope != null) {
   scope.close();
  }
 }
}
origin: ch.cern.hadoop/hadoop-hdfs

/**
 * Continues an upstream trace span from the trace info carried in a
 * data-transfer base header; delegates to the
 * {@code DataTransferTraceInfoProto} overload.
 *
 * @param header request base header whose optional traceInfo identifies the
 *   caller's span
 * @param description description for the continued span
 * @return the opened trace scope — NOTE(review): callers null-check the
 *   result, so the delegate presumably returns null when tracing is off;
 *   verify against the overload.
 */
public static TraceScope continueTraceSpan(BaseHeaderProto header,
  String description) {
 return continueTraceSpan(header.getTraceInfo(), description);
}
origin: ch.cern.hadoop/hadoop-hdfs

/**
 * Receive {@link Op#REQUEST_SHORT_CIRCUIT_FDS}: decode the request and
 * dispatch to {@code requestShortCircuitFds}, continuing the caller's trace
 * span for the duration of the operation.
 */
private void opRequestShortCircuitFds(DataInputStream in) throws IOException {
 final OpRequestShortCircuitAccessProto request =
   OpRequestShortCircuitAccessProto.parseFrom(vintPrefixed(in));
 // The slot id is optional on the wire; absent means no shared-memory slot.
 final SlotId slotId;
 if (request.hasSlotId()) {
  slotId = PBHelper.convert(request.getSlotId());
 } else {
  slotId = null;
 }
 TraceScope scope = continueTraceSpan(request.getHeader(),
   request.getClass().getSimpleName());
 try {
  requestShortCircuitFds(PBHelper.convert(request.getHeader().getBlock()),
    PBHelper.convert(request.getHeader().getToken()),
    slotId, request.getMaxVersion(),
    request.getSupportsReceiptVerification());
 } finally {
  // Scope may be null when the request carried no trace info.
  if (scope != null) {
   scope.close();
  }
 }
}
origin: ch.cern.hadoop/hadoop-hdfs

// Generated by protoc — kept byte-identical to the generator's output.
// Merges each field of {@code other} into this builder, protobuf-style:
// only fields that are set on {@code other} are merged, and message-typed
// fields (block, token, traceInfo) are merged recursively rather than
// replaced wholesale.  Unknown fields are carried over as well.
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto other) {
 // Fast path: merging the default instance is a no-op.
 if (other == org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto.getDefaultInstance()) return this;
 if (other.hasBlock()) {
  mergeBlock(other.getBlock());
 }
 if (other.hasToken()) {
  mergeToken(other.getToken());
 }
 if (other.hasTraceInfo()) {
  mergeTraceInfo(other.getTraceInfo());
 }
 this.mergeUnknownFields(other.getUnknownFields());
 return this;
}
origin: io.prestosql.hadoop/hadoop-apache

/**
 * Receive {@link Op#TRANSFER_BLOCK}: parse the on-wire request, convert the
 * target datanode list and storage types, and hand everything to
 * {@code transferBlock} inside the continued trace span.
 */
private void opTransferBlock(DataInputStream in) throws IOException {
 final OpTransferBlockProto request =
   OpTransferBlockProto.parseFrom(vintPrefixed(in));
 final DatanodeInfo[] targetNodes = PBHelper.convert(request.getTargetsList());
 TraceScope scope = continueTraceSpan(request.getHeader(),
   request.getClass().getSimpleName());
 try {
  transferBlock(
    PBHelper.convert(request.getHeader().getBaseHeader().getBlock()),
    PBHelper.convert(request.getHeader().getBaseHeader().getToken()),
    request.getHeader().getClientName(),
    targetNodes,
    PBHelper.convertStorageTypes(request.getTargetStorageTypesList(),
      targetNodes.length));
 } finally {
  // No trace info in the request yields a null scope; nothing to close.
  if (scope != null) {
   scope.close();
  }
 }
}
origin: io.prestosql.hadoop/hadoop-apache

// Generated by protoc — kept byte-identical to the generator's output.
// Serializes this message to the wire: each set field is written as a
// length-delimited sub-message with its field number, in ascending order,
// followed by any unknown fields preserved from parsing.
public void writeTo(io.prestosql.hadoop.$internal.com.google.protobuf.CodedOutputStream output)
          throws java.io.IOException {
 // Called for its side effect: caches per-field sizes used by writeMessage.
 getSerializedSize();
 // bitField0_ tracks which optional/required fields are set (one bit each).
 if (((bitField0_ & 0x00000001) == 0x00000001)) {
  output.writeMessage(1, block_);
 }
 if (((bitField0_ & 0x00000002) == 0x00000002)) {
  output.writeMessage(2, token_);
 }
 if (((bitField0_ & 0x00000004) == 0x00000004)) {
  output.writeMessage(3, traceInfo_);
 }
 getUnknownFields().writeTo(output);
}
origin: org.apache.hadoop/hadoop-hdfs-client

public Builder toBuilder() { return newBuilder(this); }
origin: ch.cern.hadoop/hadoop-hdfs

 /**
  * Receive OP_BLOCK_CHECKSUM: decode the varint-length-prefixed request and
  * dispatch to {@code blockChecksum}, continuing the caller's trace span for
  * the duration of the operation.
  */
 private void opBlockChecksum(DataInputStream in) throws IOException {
  final OpBlockChecksumProto request =
    OpBlockChecksumProto.parseFrom(vintPrefixed(in));
  TraceScope scope = continueTraceSpan(request.getHeader(),
    request.getClass().getSimpleName());
  try {
   blockChecksum(PBHelper.convert(request.getHeader().getBlock()),
     PBHelper.convert(request.getHeader().getToken()));
  } finally {
   // Scope may be null when the request carried no trace info.
   if (scope != null) {
    scope.close();
   }
  }
 }
}
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos$BaseHeaderProto

Javadoc

Protobuf type hadoop.hdfs.BaseHeaderProto

Most used methods

  • getBlock
    required .hadoop.hdfs.ExtendedBlockProto block = 1;
  • getToken
    optional .hadoop.common.TokenProto token = 2;
  • getTraceInfo
    optional .hadoop.hdfs.DataTransferTraceInfoProto traceInfo = 3;
  • newBuilder
  • <init>
  • equals
  • getDefaultInstance
  • getDescriptorForType
  • getSerializedSize
  • getUnknownFields
  • hasBlock
    required .hadoop.hdfs.ExtendedBlockProto block = 1;
  • hasToken
    optional .hadoop.common.TokenProto token = 2;
  • hasBlock,
  • hasToken,
  • hasTraceInfo,
  • hashCode,
  • initFields,
  • isInitialized,
  • makeExtensionsImmutable,
  • parseUnknownField,
  • toBuilder

Popular in Java

  • Making http post requests using okhttp
  • setRequestProperty (URLConnection)
  • requestLocationUpdates (LocationManager)
  • scheduleAtFixedRate (Timer)
    Schedules the specified task for repeated fixed-rate execution, beginning after the specified delay.
  • ObjectMapper (com.fasterxml.jackson.databind)
    This mapper (or, data binder, or codec) provides functionality for converting between Java objects (
  • FileOutputStream (java.io)
    A file output stream is an output stream for writing data to a File or to a FileDescriptor. Whether
  • URL (java.net)
    A Uniform Resource Locator that identifies the location of an Internet resource as specified by RFC
  • GregorianCalendar (java.util)
    GregorianCalendar is a concrete subclass of Calendarand provides the standard calendar used by most
  • UUID (java.util)
    UUID is an immutable representation of a 128-bit universally unique identifier (UUID). There are mul
  • Reflections (org.reflections)
    Reflections one-stop-shop object. Reflections scans your classpath, indexes the metadata, allows you t
Codota Logo
  • Products

    Search for Java codeSearch for JavaScript codeEnterprise
  • IDE Plugins

    IntelliJ IDEAWebStormAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimAtomGoLandRubyMineEmacsJupyter
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogCodota Academy Plugin user guide Terms of usePrivacy policyJava Code IndexJavascript Code Index
Get Codota for your IDE now