// Saver side: append the FileSummary as a length-delimited protobuf message.
fileSummary.writeDelimitedTo(out);
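// A minimal saver-side sketch (assumed shape; the method name is
// hypothetical) of the step the call above belongs to: the FileSummary goes
// out length-delimited, then the byte count of that delimited blob is
// appended as a 4-byte big-endian int so loadSummary() below can seek
// backwards from EOF.
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import com.google.protobuf.CodedOutputStream;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary;

static void writeSummaryTrailer(OutputStream out, FileSummary fileSummary)
    throws IOException {
  fileSummary.writeDelimitedTo(out);
  // delimited form = varint length prefix + the message body
  int length = CodedOutputStream.computeRawVarint32Size(
      fileSummary.getSerializedSize()) + fileSummary.getSerializedSize();
  byte[] lengthBytes = new byte[4];
  ByteBuffer.wrap(lengthBytes).asIntBuffer().put(length);  // big-endian by default
  out.write(lengthBytes);
}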
public org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary buildPartial() {
  org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary result =
      new org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  result.ondiskVersion_ = ondiskVersion_;
  if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
    to_bitField0_ |= 0x00000002;
  }
  result.layoutVersion_ = layoutVersion_;
  if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
    to_bitField0_ |= 0x00000004;
  }
  result.codec_ = codec_;
  if (sectionsBuilder_ == null) {
    if (((bitField0_ & 0x00000008) == 0x00000008)) {
      sections_ = java.util.Collections.unmodifiableList(sections_);
      bitField0_ = (bitField0_ & ~0x00000008);
    }
    result.sections_ = sections_;
  } else {
    result.sections_ = sectionsBuilder_.build();
  }
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
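// Hedged usage sketch for the generated Builder above: ondiskVersion and
// layoutVersion are required fields in fsimage.proto, so build() (which
// calls buildPartial() and then validates) throws without them. The
// concrete values here are placeholders, not real version numbers.
FileSummary summary = FileSummary.newBuilder()
    .setOndiskVersion(1)
    .setLayoutVersion(-64)  // placeholder; real images use NameNodeLayoutVersion
    .addSections(FileSummary.Section.newBuilder()
        .setName("INODE")
        .setOffset(8)
        .setLength(1024))
    .build();               // sections_ is frozen into an unmodifiable list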
// OIV-style visitor: sort the summary's sections, then seek to each offset
// and wrap a length-bounded, possibly decompressed stream around it.
ArrayList<FsImageProto.FileSummary.Section> sections =
    Lists.newArrayList(summary.getSectionsList());
Collections.sort(sections, new Comparator<FsImageProto.FileSummary.Section>() {
  @Override
  public int compare(FsImageProto.FileSummary.Section s1,
      FsImageProto.FileSummary.Section s2) {
    // order sections by their logical SectionName
    SectionName n1 = SectionName.fromString(s1.getName());
    SectionName n2 = SectionName.fromString(s2.getName());
    if (n1 == null) { return n2 == null ? 0 : -1; }
    if (n2 == null) { return -1; }
    return n1.ordinal() - n2.ordinal();
  }
});
for (FsImageProto.FileSummary.Section s : sections) {
  fin.getChannel().position(s.getOffset());
  InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
      summary.getCodec(), new BufferedInputStream(new LimitInputStream(
          fin, s.getLength())));
  // ... process the section from `is` ...
}
// Loader side: reject the image when strict layout matching is requested
// and the versions differ, then load the sections in their canonical order.
if (requireSameLayoutVersion && summary.getLayoutVersion() !=
    HdfsConstants.NAMENODE_LAYOUT_VERSION) {
  throw new IOException("Image version " + summary.getLayoutVersion() +
      " is not equal to the software version " +
      HdfsConstants.NAMENODE_LAYOUT_VERSION);
}
ArrayList<FileSummary.Section> sections =
    Lists.newArrayList(summary.getSectionsList());
Collections.sort(sections, new Comparator<FileSummary.Section>() {
  @Override
  public int compare(FileSummary.Section s1, FileSummary.Section s2) {
    SectionName n1 = SectionName.fromString(s1.getName());
    SectionName n2 = SectionName.fromString(s2.getName());
    if (n1 == null) { return n2 == null ? 0 : -1; }
    if (n2 == null) { return -1; }
    return n1.ordinal() - n2.ordinal();
  }
});
for (FileSummary.Section s : sections) {
  fin.getChannel().position(s.getOffset());
  InputStream in = new BufferedInputStream(
      new LimitInputStream(fin, s.getLength()));
  in = FSImageUtil.wrapInputStreamForCompression(conf,
      summary.getCodec(), in);
  // ... dispatch to the per-section loader ...
}
InputStream is;
ArrayList<FileSummary.Section> sections =
    Lists.newArrayList(summary.getSectionsList());
Collections.sort(sections, new Comparator<FileSummary.Section>() {
  @Override
  public int compare(FileSummary.Section s1, FileSummary.Section s2) {
    SectionName n1 = SectionName.fromString(s1.getName());
    SectionName n2 = SectionName.fromString(s2.getName());
    if (n1 == null) { return n2 == null ? 0 : -1; }
    if (n2 == null) { return -1; }
    return n1.ordinal() - n2.ordinal();
  }
});
for (FileSummary.Section section : sections) {
  fin.getChannel().position(section.getOffset());
  is = FSImageUtil.wrapInputStreamForCompression(conf,
      summary.getCodec(), new BufferedInputStream(new LimitInputStream(
          fin, section.getLength())));
  switch (SectionName.fromString(section.getName())) {
    // ... one case per section type (NS_INFO, STRING_TABLE, INODE, ...) ...
  }
}
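// The same seek-and-wrap pattern, factored into a hedged helper that pulls a
// single named section out of an image. FSImageUtil, LimitInputStream and
// the Section accessors are as used above; the helper itself is
// illustrative, not Hadoop API.
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.server.namenode.FSImageUtil;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary;
import org.apache.hadoop.util.LimitInputStream;

static InputStream openSection(FileInputStream fin, Configuration conf,
    FileSummary summary, String name) throws IOException {
  for (FileSummary.Section s : summary.getSectionsList()) {
    if (name.equals(s.getName())) {
      fin.getChannel().position(s.getOffset());
      return FSImageUtil.wrapInputStreamForCompression(conf,
          summary.getCodec(),
          new BufferedInputStream(new LimitInputStream(fin, s.getLength())));
    }
  }
  throw new IOException("No section named " + name);
}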
// Write one piece length-delimited, then record its on-disk size as a
// 4-byte big-endian int (the tail of this write is assumed; it mirrors the
// readInt() in loadSummary() below).
s.writeDelimitedTo(raw);
int length = getOndiskSize(s);
byte[] lengthBytes = new byte[4];
ByteBuffer.wrap(lengthBytes).asIntBuffer().put(length);
raw.write(lengthBytes);
public Builder mergeFrom(org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary other) {
  if (other == org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary.getDefaultInstance()) return this;
  if (other.hasOndiskVersion()) {
    setOndiskVersion(other.getOndiskVersion());
  }
  if (other.hasLayoutVersion()) {
    setLayoutVersion(other.getLayoutVersion());
  }
  if (other.hasCodec()) {
    bitField0_ |= 0x00000004;
    codec_ = other.codec_;
    onChanged();
  }
  // (generated merge logic for the repeated `sections` field elided)
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
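// Hedged sketch of the generated merge semantics above: fields set on
// `other` overwrite this builder's values, unset fields are left alone.
// The codec string and version numbers are placeholders.
FileSummary base = FileSummary.newBuilder()
    .setOndiskVersion(1).setLayoutVersion(-64).build();
FileSummary withCodec = FileSummary.newBuilder()
    .setOndiskVersion(1).setLayoutVersion(-64).setCodec("lz4").build();
FileSummary merged = base.toBuilder().mergeFrom(withCodec).build();
assert "lz4".equals(merged.getCodec());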
public static FileSummary loadSummary(RandomAccessFile file)
    throws IOException {
  final int FILE_LENGTH_FIELD_SIZE = 4;
  long fileLength = file.length();
  // The last 4 bytes of the image hold the length of the FileSummary.
  file.seek(fileLength - FILE_LENGTH_FIELD_SIZE);
  int summaryLength = file.readInt();
  if (summaryLength <= 0) {
    throw new IOException("Negative length of the file");
  }
  // Seek back over the summary itself and parse it.
  file.seek(fileLength - FILE_LENGTH_FIELD_SIZE - summaryLength);
  byte[] summaryBytes = new byte[summaryLength];
  file.readFully(summaryBytes);
  FileSummary summary = FileSummary
      .parseDelimitedFrom(new ByteArrayInputStream(summaryBytes));
  if (summary.getOndiskVersion() != FILE_VERSION) {
    throw new IOException("Unsupported file version " +
        summary.getOndiskVersion());
  }
  if (!NameNodeLayoutVersion.supports(Feature.PROTOBUF_FORMAT,
      summary.getLayoutVersion())) {
    throw new IOException("Unsupported layout version " +
        summary.getLayoutVersion());
  }
  return summary;
}
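// Hedged usage sketch for loadSummary() (assuming it lives on FSImageUtil
// as in Hadoop): open an fsimage read-only and print its section table.
// The path is a made-up example.
import java.io.RandomAccessFile;
import org.apache.hadoop.hdfs.server.namenode.FSImageUtil;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary;

try (RandomAccessFile file =
         new RandomAccessFile("/tmp/fsimage_0000000000000000042", "r")) {
  FileSummary summary = FSImageUtil.loadSummary(file);
  System.out.println("ondiskVersion=" + summary.getOndiskVersion()
      + " layoutVersion=" + summary.getLayoutVersion()
      + " codec=" + summary.getCodec());
  for (FileSummary.Section s : summary.getSectionsList()) {
    System.out.println(s.getName() + " offset=" + s.getOffset()
        + " length=" + s.getLength());
  }
}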
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
        .computeUInt32Size(1, ondiskVersion_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += com.google.protobuf.CodedOutputStream
        .computeUInt32Size(2, layoutVersion_);
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(3, getCodecBytes());
  }
  for (int i = 0; i < sections_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(4, sections_.get(i));
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
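// A small check on what the memoized size above means: toByteArray() has
// exactly getSerializedSize() bytes, while writeDelimitedTo() adds a varint
// length prefix on top (field values below are placeholders).
FileSummary summary = FileSummary.newBuilder()
    .setOndiskVersion(1).setLayoutVersion(-64).build();
byte[] raw = summary.toByteArray();
assert raw.length == summary.getSerializedSize();

java.io.ByteArrayOutputStream bos = new java.io.ByteArrayOutputStream();
summary.writeDelimitedTo(bos);
assert bos.size() > summary.getSerializedSize();  // varint prefix included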