Codota Logo
DataTransferProtos$BaseHeaderProto.getTraceInfo
Code IndexAdd Codota to your IDE (free)

How to use
getTraceInfo
method
in
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos$BaseHeaderProto

Best Java code snippets using org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos$BaseHeaderProto.getTraceInfo (Showing top 15 results out of 315)

  • Common ways to obtain DataTransferProtos$BaseHeaderProto
private void myMethod () {
DataTransferProtos$BaseHeaderProto d =
  • Codota IconDataTransferProtos.BaseHeaderProto$Builder dataTransferProtosBaseHeaderProto$Builder;dataTransferProtosBaseHeaderProto$Builder.build()
  • Codota IconDataTransferProtos.BaseHeaderProto$Builder dataTransferProtosBaseHeaderProto$Builder;dataTransferProtosBaseHeaderProto$Builder.buildPartial()
  • Codota IconObject object;(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto) object
  • Smart code suggestions by Codota
}
origin: org.apache.hadoop/hadoop-hdfs-client

@java.lang.Override
public int hashCode() {
  // Serve the cached value when available; 0 doubles as the "unset" sentinel.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int h = 41;
  // Mix in the message descriptor so distinct message types hash differently.
  h = (19 * h) + getDescriptorForType().hashCode();
  // Each present field contributes its field number and its value's hash.
  if (hasBlock()) {
    h = (37 * h) + BLOCK_FIELD_NUMBER;
    h = (53 * h) + getBlock().hashCode();
  }
  if (hasToken()) {
    h = (37 * h) + TOKEN_FIELD_NUMBER;
    h = (53 * h) + getToken().hashCode();
  }
  if (hasTraceInfo()) {
    h = (37 * h) + TRACEINFO_FIELD_NUMBER;
    h = (53 * h) + getTraceInfo().hashCode();
  }
  // Unknown fields participate too, keeping hashCode consistent with equals.
  h = (29 * h) + getUnknownFields().hashCode();
  memoizedHashCode = h;
  return h;
}
origin: ch.cern.hadoop/hadoop-hdfs

@java.lang.Override
public int hashCode() {
  // Return the memoized hash if one was computed earlier (0 means "not yet").
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int acc = 41;
  // Fold in the descriptor so different message types produce different hashes.
  acc = (19 * acc) + getDescriptorForType().hashCode();
  // Only fields that are actually set contribute to the hash.
  if (hasBlock()) {
    acc = (37 * acc) + BLOCK_FIELD_NUMBER;
    acc = (53 * acc) + getBlock().hashCode();
  }
  if (hasToken()) {
    acc = (37 * acc) + TOKEN_FIELD_NUMBER;
    acc = (53 * acc) + getToken().hashCode();
  }
  if (hasTraceInfo()) {
    acc = (37 * acc) + TRACEINFO_FIELD_NUMBER;
    acc = (53 * acc) + getTraceInfo().hashCode();
  }
  // Include unknown fields so equal messages (per equals) hash equally.
  acc = (29 * acc) + getUnknownFields().hashCode();
  memoizedHashCode = acc;
  return acc;
}
origin: io.prestosql.hadoop/hadoop-apache

@java.lang.Override
public int hashCode() {
  // Cached result short-circuits recomputation; 0 marks "not computed yet".
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int h = 41;
  // Start from the descriptor so unrelated message types diverge early.
  h = (19 * h) + getDescriptorForType().hashCode();
  // Present fields add their field number followed by their value hash.
  if (hasBlock()) {
    h = (37 * h) + BLOCK_FIELD_NUMBER;
    h = (53 * h) + getBlock().hashCode();
  }
  if (hasToken()) {
    h = (37 * h) + TOKEN_FIELD_NUMBER;
    h = (53 * h) + getToken().hashCode();
  }
  if (hasTraceInfo()) {
    h = (37 * h) + TRACEINFO_FIELD_NUMBER;
    h = (53 * h) + getTraceInfo().hashCode();
  }
  // Unknown fields are mixed in last, matching the equals() contract.
  h = (29 * h) + getUnknownFields().hashCode();
  memoizedHashCode = h;
  return h;
}
origin: org.apache.hadoop/hadoop-hdfs

/**
 * Continues a trace span described by the request header by delegating to the
 * overload that takes the extracted trace info.
 *
 * @param header      request header whose optional traceInfo field is read
 * @param description label for the continued span
 * @return the trace scope returned by the delegate overload
 */
private TraceScope continueTraceSpan(BaseHeaderProto header,
                      String description) {
 return continueTraceSpan(header.getTraceInfo(), description);
}
origin: io.prestosql.hadoop/hadoop-apache

/**
 * Continues a trace span using the tracing metadata carried in the request
 * header; delegates to the overload that accepts the extracted trace info.
 *
 * @param header      request header whose optional traceInfo field is read
 * @param description label for the continued span
 * @return the trace scope returned by the delegate overload
 */
public static TraceScope continueTraceSpan(BaseHeaderProto header,
  String description) {
 return continueTraceSpan(header.getTraceInfo(), description);
}
origin: ch.cern.hadoop/hadoop-hdfs

public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto source) {
  // Merging the shared default (empty) instance is a no-op.
  if (source == org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto.getDefaultInstance()) {
    return this;
  }
  // Recursively merge each sub-message that is present on the source.
  if (source.hasBlock()) {
    mergeBlock(source.getBlock());
  }
  if (source.hasToken()) {
    mergeToken(source.getToken());
  }
  if (source.hasTraceInfo()) {
    mergeTraceInfo(source.getTraceInfo());
  }
  // Carry over any fields that were not recognized at parse time.
  this.mergeUnknownFields(source.getUnknownFields());
  return this;
}
origin: ch.cern.hadoop/hadoop-hdfs

/**
 * Continues a trace span for the given request header; thin wrapper that
 * extracts the header's optional traceInfo field and delegates to the
 * overload taking that trace info directly.
 *
 * @param header      request header whose optional traceInfo field is read
 * @param description label for the continued span
 * @return the trace scope returned by the delegate overload
 */
public static TraceScope continueTraceSpan(BaseHeaderProto header,
  String description) {
 return continueTraceSpan(header.getTraceInfo(), description);
}
origin: org.apache.hadoop/hadoop-hdfs-client

public final boolean isInitialized() {
  // Fast path: -1 means "not computed"; otherwise 1 encodes true, 0 false.
  byte cached = memoizedIsInitialized;
  if (cached != -1) {
    return cached == 1;
  }
  // 'block' is a required field: it must be set and itself fully initialized.
  // Short-circuit keeps getBlock() from running when the field is absent.
  if (!hasBlock() || !getBlock().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  // Optional sub-messages only need to be initialized when present.
  if (hasToken() && !getToken().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (hasTraceInfo() && !getTraceInfo().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}
origin: ch.cern.hadoop/hadoop-hdfs

public final boolean isInitialized() {
  // Use the memoized answer when one exists (-1 = unknown, 1 = true, 0 = false).
  byte state = memoizedIsInitialized;
  if (state != -1) {
    return state == 1;
  }
  // Required 'block' field: must be present and recursively initialized.
  // The || short-circuits so getBlock() is never called on an unset field.
  if (!hasBlock() || !getBlock().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  // Optional fields are validated only if they have been set.
  if (hasToken() && !getToken().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (hasTraceInfo() && !getTraceInfo().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}
origin: ch.cern.hadoop/hadoop-hdfs

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Identity check first: an object always equals itself.
  if (obj == this) {
    return true;
  }
  // Non-BaseHeaderProto objects are handled by the superclass comparison.
  if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto that =
      (org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto) obj;
  // Each field must agree on presence, and on value when present.
  if (hasBlock() != that.hasBlock()) {
    return false;
  }
  if (hasBlock() && !getBlock().equals(that.getBlock())) {
    return false;
  }
  if (hasToken() != that.hasToken()) {
    return false;
  }
  if (hasToken() && !getToken().equals(that.getToken())) {
    return false;
  }
  if (hasTraceInfo() != that.hasTraceInfo()) {
    return false;
  }
  if (hasTraceInfo() && !getTraceInfo().equals(that.getTraceInfo())) {
    return false;
  }
  // Finally, unknown fields must match as well.
  return getUnknownFields().equals(that.getUnknownFields());
}
origin: org.apache.hadoop/hadoop-hdfs-client

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Reflexive shortcut.
  if (obj == this) {
    return true;
  }
  // Defer to the superclass for objects of any other type.
  if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto peer =
      (org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto) obj;
  // Presence flags must match, and set fields must compare equal.
  if (hasBlock() != peer.hasBlock()) {
    return false;
  }
  if (hasBlock() && !getBlock().equals(peer.getBlock())) {
    return false;
  }
  if (hasToken() != peer.hasToken()) {
    return false;
  }
  if (hasToken() && !getToken().equals(peer.getToken())) {
    return false;
  }
  if (hasTraceInfo() != peer.hasTraceInfo()) {
    return false;
  }
  if (hasTraceInfo() && !getTraceInfo().equals(peer.getTraceInfo())) {
    return false;
  }
  // Unknown fields are part of the equality contract too.
  return getUnknownFields().equals(peer.getUnknownFields());
}
origin: io.prestosql.hadoop/hadoop-apache

public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto from) {
  // Nothing to do when asked to merge the canonical empty instance.
  if (from == org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto.getDefaultInstance()) {
    return this;
  }
  // Merge each sub-message field that the source actually has set.
  if (from.hasBlock()) {
    mergeBlock(from.getBlock());
  }
  if (from.hasToken()) {
    mergeToken(from.getToken());
  }
  if (from.hasTraceInfo()) {
    mergeTraceInfo(from.getTraceInfo());
  }
  // Preserve fields the parser did not recognize.
  this.mergeUnknownFields(from.getUnknownFields());
  return this;
}
origin: io.prestosql.hadoop/hadoop-apache

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Same reference: trivially equal.
  if (obj == this) {
    return true;
  }
  // Let the superclass decide for foreign types.
  if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto rhs =
      (org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto) obj;
  // Field-by-field comparison: presence must agree, then values when present.
  if (hasBlock() != rhs.hasBlock()) {
    return false;
  }
  if (hasBlock() && !getBlock().equals(rhs.getBlock())) {
    return false;
  }
  if (hasToken() != rhs.hasToken()) {
    return false;
  }
  if (hasToken() && !getToken().equals(rhs.getToken())) {
    return false;
  }
  if (hasTraceInfo() != rhs.hasTraceInfo()) {
    return false;
  }
  if (hasTraceInfo() && !getTraceInfo().equals(rhs.getTraceInfo())) {
    return false;
  }
  // Unknown fields complete the comparison.
  return getUnknownFields().equals(rhs.getUnknownFields());
}
origin: org.apache.hadoop/hadoop-hdfs-client

public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto input) {
  // Skip work entirely for the shared default instance.
  if (input == org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto.getDefaultInstance()) {
    return this;
  }
  // Fold each present sub-message into this builder.
  if (input.hasBlock()) {
    mergeBlock(input.getBlock());
  }
  if (input.hasToken()) {
    mergeToken(input.getToken());
  }
  if (input.hasTraceInfo()) {
    mergeTraceInfo(input.getTraceInfo());
  }
  // Keep any unrecognized fields so round-tripping is lossless.
  this.mergeUnknownFields(input.getUnknownFields());
  return this;
}
origin: io.prestosql.hadoop/hadoop-apache

public final boolean isInitialized() {
  // Memoized result, if any: -1 = not yet computed, 1 = true, 0 = false.
  byte remembered = memoizedIsInitialized;
  if (remembered != -1) {
    return remembered == 1;
  }
  // The required 'block' must exist and be initialized itself; the
  // short-circuit avoids calling getBlock() when the field is missing.
  if (!hasBlock() || !getBlock().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  // Optional sub-messages are checked only when set.
  if (hasToken() && !getToken().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (hasTraceInfo() && !getTraceInfo().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}
org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos$BaseHeaderProto.getTraceInfo

Javadoc

optional .hadoop.hdfs.DataTransferTraceInfoProto traceInfo = 3;

Popular methods of DataTransferProtos$BaseHeaderProto

  • getBlock
    required .hadoop.hdfs.ExtendedBlockProto block = 1;
  • getToken
    optional .hadoop.common.TokenProto token = 2;
  • newBuilder
  • <init>
  • equals
  • getDefaultInstance
  • getDescriptorForType
  • getSerializedSize
  • getUnknownFields
  • hasBlock
    required .hadoop.hdfs.ExtendedBlockProto block = 1;
  • hasToken
    optional .hadoop.common.TokenProto token = 2;
  • hasTraceInfo
    optional .hadoop.hdfs.DataTransferTraceInfoProto traceInfo = 3;
  • hasToken,
  • hasTraceInfo,
  • hashCode,
  • initFields,
  • isInitialized,
  • makeExtensionsImmutable,
  • parseUnknownField,
  • toBuilder

Popular in Java

  • Parsing JSON documents to java classes using gson
  • startActivity (Activity)
  • getResourceAsStream (ClassLoader)
    Returns a stream for the resource with the specified name. See #getResource(String) for a description.
  • putExtra (Intent)
  • FileWriter (java.io)
    Convenience class for writing character files. The constructors of this class assume that the default character encoding is acceptable.
  • SocketException (java.net)
    This SocketException may be thrown during socket creation or setting options, and is the superclass
  • TreeMap (java.util)
    A Red-Black tree based NavigableMap implementation. The map is sorted according to the Comparable of
  • Stream (java.util.stream)
    A sequence of elements supporting sequential and parallel aggregate operations. The following exampl
  • JPanel (javax.swing)
  • Base64 (org.apache.commons.codec.binary)
    Provides Base64 encoding and decoding as defined by RFC 2045. This class implements section 6.8 of that RFC.
Codota Logo
  • Products

    Search for Java codeSearch for JavaScript codeEnterprise
  • IDE Plugins

    IntelliJ IDEAWebStormAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimAtomGoLandRubyMineEmacsJupyter
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogCodota Academy Plugin user guide Terms of usePrivacy policyJava Code IndexJavascript Code Index
Get Codota for your IDE now