/**
 * <code>required .hadoop.common.TokenProto token = 1;</code>
 */
public Builder setToken(
    org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) {
  // Build exactly once; the message lands either in the plain field or in
  // the nested-builder wrapper, depending on which representation is active.
  org.apache.hadoop.security.proto.SecurityProtos.TokenProto built =
      builderForValue.build();
  if (tokenBuilder_ != null) {
    tokenBuilder_.setMessage(built);
  } else {
    token_ = built;
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * Construct a TokenProto from this Token instance.
 * @return a new TokenProto object holding copies of data in this instance
 */
public TokenProto toTokenProto() {
  TokenProto.Builder builder = TokenProto.newBuilder();
  builder.setIdentifier(ByteString.copyFrom(this.getIdentifier()));
  builder.setPassword(ByteString.copyFrom(this.getPassword()));
  // Copy the raw Text bytes, bounded by getLength(), so the kind and
  // service values are transferred verbatim.
  builder.setKindBytes(ByteString.copyFrom(
      this.getKind().getBytes(), 0, this.getKind().getLength()));
  builder.setServiceBytes(ByteString.copyFrom(
      this.getService().getBytes(), 0, this.getService().getLength()));
  return builder.build();
}
// Fold the previously stored token_ into subBuilder, then replace token_
// with the (possibly partially initialized) merged result.
// NOTE(review): fragment — the enclosing method is outside this chunk, so
// the surrounding parse/merge context is unverified.
subBuilder.mergeFrom(token_); token_ = subBuilder.buildPartial();
/**
 * Merges every field that is set on {@code other} into this builder;
 * fields left unset on {@code other} are not modified here.
 */
public Builder mergeFrom(org.apache.hadoop.security.proto.SecurityProtos.TokenProto other) {
  if (other == org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) {
    return this;
  }
  if (other.hasIdentifier()) {
    setIdentifier(other.getIdentifier());
  }
  if (other.hasPassword()) {
    setPassword(other.getPassword());
  }
  if (other.hasKind()) {
    // Share the other message's backing object directly.
    kind_ = other.kind_;
    bitField0_ |= 0x00000004;
    onChanged();
  }
  if (other.hasService()) {
    service_ = other.service_;
    bitField0_ |= 0x00000008;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Merges every field that is set on {@code other} into this builder;
 * fields left unset on {@code other} are not modified here.
 */
public Builder mergeFrom(org.apache.hadoop.security.proto.SecurityProtos.TokenProto other) {
  if (other == org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) {
    return this;
  }
  if (other.hasIdentifier()) {
    setIdentifier(other.getIdentifier());
  }
  if (other.hasPassword()) {
    setPassword(other.getPassword());
  }
  if (other.hasKind()) {
    // Share the other message's backing object directly.
    kind_ = other.kind_;
    bitField0_ |= 0x00000004;
    onChanged();
  }
  if (other.hasService()) {
    service_ = other.service_;
    bitField0_ |= 0x00000008;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Merges every field that is set on {@code other} into this builder;
 * fields left unset on {@code other} are not modified here.
 */
public Builder mergeFrom(org.apache.hadoop.security.proto.SecurityProtos.TokenProto other) {
  if (other == org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) {
    return this;
  }
  if (other.hasIdentifier()) {
    setIdentifier(other.getIdentifier());
  }
  if (other.hasPassword()) {
    setPassword(other.getPassword());
  }
  if (other.hasKind()) {
    // Share the other message's backing object directly.
    kind_ = other.kind_;
    bitField0_ |= 0x00000004;
    onChanged();
  }
  if (other.hasService()) {
    service_ = other.service_;
    bitField0_ |= 0x00000008;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Merges every field that is set on {@code other} into this builder;
 * fields left unset on {@code other} are not modified here.
 */
public Builder mergeFrom(org.apache.hadoop.security.proto.SecurityProtos.TokenProto other) {
  if (other == org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) {
    return this;
  }
  if (other.hasIdentifier()) {
    setIdentifier(other.getIdentifier());
  }
  if (other.hasPassword()) {
    setPassword(other.getPassword());
  }
  if (other.hasKind()) {
    // Share the other message's backing object directly.
    kind_ = other.kind_;
    bitField0_ |= 0x00000004;
    onChanged();
  }
  if (other.hasService()) {
    service_ = other.service_;
    bitField0_ |= 0x00000008;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * Merges every field that is set on {@code other} into this builder;
 * fields left unset on {@code other} are not modified here.
 */
public Builder mergeFrom(org.apache.hadoop.security.proto.SecurityProtos.TokenProto other) {
  if (other == org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) {
    return this;
  }
  if (other.hasIdentifier()) {
    setIdentifier(other.getIdentifier());
  }
  if (other.hasPassword()) {
    setPassword(other.getPassword());
  }
  if (other.hasKind()) {
    // Share the other message's backing object directly.
    kind_ = other.kind_;
    bitField0_ |= 0x00000004;
    onChanged();
  }
  if (other.hasService()) {
    service_ = other.service_;
    bitField0_ |= 0x00000008;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
/**
 * <code>required .hadoop.common.TokenProto token = 1;</code>
 */
public Builder mergeToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
  if (tokenBuilder_ != null) {
    tokenBuilder_.mergeFrom(value);
  } else {
    boolean fieldAlreadySet = (bitField0_ & 0x00000001) == 0x00000001;
    if (fieldAlreadySet
        && token_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto
            .getDefaultInstance()) {
      // Merge into the existing message rather than overwriting it.
      token_ = org.apache.hadoop.security.proto.SecurityProtos.TokenProto
          .newBuilder(token_).mergeFrom(value).buildPartial();
    } else {
      token_ = value;
    }
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>required .hadoop.common.TokenProto token = 1;</code>
 */
public Builder mergeToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
  if (tokenBuilder_ != null) {
    tokenBuilder_.mergeFrom(value);
  } else {
    boolean fieldAlreadySet = (bitField0_ & 0x00000001) == 0x00000001;
    if (fieldAlreadySet
        && token_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto
            .getDefaultInstance()) {
      // Merge into the existing message rather than overwriting it.
      token_ = org.apache.hadoop.security.proto.SecurityProtos.TokenProto
          .newBuilder(token_).mergeFrom(value).buildPartial();
    } else {
      token_ = value;
    }
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>optional .hadoop.common.TokenProto token = 1;</code>
 */
public Builder mergeToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
  if (tokenBuilder_ != null) {
    tokenBuilder_.mergeFrom(value);
  } else {
    boolean fieldAlreadySet = (bitField0_ & 0x00000001) == 0x00000001;
    if (fieldAlreadySet
        && token_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto
            .getDefaultInstance()) {
      // Merge into the existing message rather than overwriting it.
      token_ = org.apache.hadoop.security.proto.SecurityProtos.TokenProto
          .newBuilder(token_).mergeFrom(value).buildPartial();
    } else {
      token_ = value;
    }
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>required .hadoop.common.TokenProto token = 2;</code>
 */
public Builder mergeToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
  if (tokenBuilder_ != null) {
    tokenBuilder_.mergeFrom(value);
  } else {
    boolean fieldAlreadySet = (bitField0_ & 0x00000002) == 0x00000002;
    if (fieldAlreadySet
        && token_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto
            .getDefaultInstance()) {
      // Merge into the existing message rather than overwriting it.
      token_ = org.apache.hadoop.security.proto.SecurityProtos.TokenProto
          .newBuilder(token_).mergeFrom(value).buildPartial();
    } else {
      token_ = value;
    }
    onChanged();
  }
  bitField0_ |= 0x00000002;
  return this;
}
/**
/**
 * <code>required .hadoop.common.TokenProto token = 1;</code>
 */
public Builder mergeToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
  if (tokenBuilder_ != null) {
    tokenBuilder_.mergeFrom(value);
  } else {
    boolean fieldAlreadySet = (bitField0_ & 0x00000001) == 0x00000001;
    if (fieldAlreadySet
        && token_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto
            .getDefaultInstance()) {
      // Merge into the existing message rather than overwriting it.
      token_ = org.apache.hadoop.security.proto.SecurityProtos.TokenProto
          .newBuilder(token_).mergeFrom(value).buildPartial();
    } else {
      token_ = value;
    }
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**
/**
 * <code>optional .hadoop.common.TokenProto jobToken = 2;</code>
 */
public Builder mergeJobToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
  if (jobTokenBuilder_ != null) {
    jobTokenBuilder_.mergeFrom(value);
  } else {
    boolean fieldAlreadySet = (bitField0_ & 0x00000002) == 0x00000002;
    if (fieldAlreadySet
        && jobToken_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto
            .getDefaultInstance()) {
      // Merge into the existing message rather than overwriting it.
      jobToken_ = org.apache.hadoop.security.proto.SecurityProtos.TokenProto
          .newBuilder(jobToken_).mergeFrom(value).buildPartial();
    } else {
      jobToken_ = value;
    }
    onChanged();
  }
  bitField0_ |= 0x00000002;
  return this;
}
/**
/**
 * Persists the shuffle info for a job to the recovery state DB (when one
 * is configured) and registers the job token in memory.
 *
 * @param jobId job whose shuffle info is being recorded
 * @param user user that owns the job
 * @param jobToken shuffle secret token for the job
 * @throws IOException if the state store update fails
 */
private void recordJobShuffleInfo(JobID jobId, String user,
    Token<JobTokenIdentifier> jobToken) throws IOException {
  if (stateDb != null) {
    // Use Token#toTokenProto so the kind/service Text bytes are copied
    // verbatim instead of being round-tripped through String (the manual
    // setKind/setService(toString()) construction can be lossy for
    // non-UTF-8 byte sequences).
    TokenProto tokenProto = jobToken.toTokenProto();
    JobShuffleInfoProto proto = JobShuffleInfoProto.newBuilder()
        .setUser(user).setJobToken(tokenProto).build();
    try {
      stateDb.put(bytes(jobId.toString()), proto.toByteArray());
    } catch (DBException e) {
      // Wrap with the failing job id for context; preserve the cause.
      throw new IOException("Error storing " + jobId, e);
    }
  }
  addJobToken(jobId, user, jobToken);
}
/**
 * Persists the shuffle info for a job to the recovery state DB (when one
 * is configured) and registers the job token in memory.
 *
 * @param jobId job whose shuffle info is being recorded
 * @param user user that owns the job
 * @param jobToken shuffle secret token for the job
 * @throws IOException if the state store update fails
 */
private void recordJobShuffleInfo(JobID jobId, String user,
    Token<JobTokenIdentifier> jobToken) throws IOException {
  if (stateDb != null) {
    // Use Token#toTokenProto so the kind/service Text bytes are copied
    // verbatim instead of being round-tripped through String (the manual
    // setKind/setService(toString()) construction can be lossy for
    // non-UTF-8 byte sequences).
    TokenProto tokenProto = jobToken.toTokenProto();
    JobShuffleInfoProto proto = JobShuffleInfoProto.newBuilder()
        .setUser(user).setJobToken(tokenProto).build();
    try {
      stateDb.put(bytes(jobId.toString()), proto.toByteArray());
    } catch (DBException e) {
      // Wrap with the failing job id for context; preserve the cause.
      throw new IOException("Error storing " + jobId, e);
    }
  }
  addJobToken(jobId, user, jobToken);
}
/**
 * Persists the shuffle info for a job to the recovery state DB (when one
 * is configured) and registers the job token in memory.
 *
 * @param jobId job whose shuffle info is being recorded
 * @param user user that owns the job
 * @param jobToken shuffle secret token for the job
 * @throws IOException if the state store update fails
 */
private void recordJobShuffleInfo(JobID jobId, String user,
    Token<JobTokenIdentifier> jobToken) throws IOException {
  if (stateDb != null) {
    // Use Token#toTokenProto so the kind/service Text bytes are copied
    // verbatim instead of being round-tripped through String (the manual
    // setKind/setService(toString()) construction can be lossy for
    // non-UTF-8 byte sequences).
    TokenProto tokenProto = jobToken.toTokenProto();
    JobShuffleInfoProto proto = JobShuffleInfoProto.newBuilder()
        .setUser(user).setJobToken(tokenProto).build();
    try {
      stateDb.put(bytes(jobId.toString()), proto.toByteArray());
    } catch (DBException e) {
      // Wrap with the failing job id for context; preserve the cause.
      throw new IOException("Error storing " + jobId, e);
    }
  }
  addJobToken(jobId, user, jobToken);
}
/**
 * Reports whether all required fields (identifier, password, kind,
 * service) have been set on this message.
 */
public final boolean isInitialized() {
  return hasIdentifier()
      && hasPassword()
      && hasKind()
      && hasService();
}
/**
 * Reports whether all required fields (identifier, password, kind,
 * service) have been set on this message.
 */
public final boolean isInitialized() {
  return hasIdentifier()
      && hasPassword()
      && hasKind()
      && hasService();
}
/**
 * <code>required .hadoop.common.TokenProto token = 1;</code>
 */
public Builder setToken(
    org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) {
  // Build exactly once; the message lands either in the plain field or in
  // the nested-builder wrapper, depending on which representation is active.
  org.apache.hadoop.security.proto.SecurityProtos.TokenProto built =
      builderForValue.build();
  if (tokenBuilder_ != null) {
    tokenBuilder_.setMessage(built);
  } else {
    token_ = built;
    onChanged();
  }
  bitField0_ |= 0x00000001;
  return this;
}
/**