hfilesCount.incrementAndGet(); isCorrupted = (storeFile.hasFileSize() && storeFile.getFileSize() != size); if (isCorrupted) hfilesCorrupted.incrementAndGet(); } catch (FileNotFoundException e) {
if (storeFile.hasFileSize() && storeFile.getFileSize() != fstat.getLen()) { String msg = "hfile: " + fileName + " size does not match with the expected one. " + " found=" + fstat.getLen() + " expected=" + storeFile.getFileSize();
@Override public void storeFile(final RegionInfo regionInfo, final String family, final SnapshotRegionManifest.StoreFile storeFile) throws IOException { // for storeFile.hasReference() case, copied as part of the manifest if (!storeFile.hasReference()) { String region = regionInfo.getEncodedName(); String hfile = storeFile.getName(); Path path = HFileLink.createPath(table, region, family, hfile); SnapshotFileInfo fileInfo = SnapshotFileInfo.newBuilder() .setType(SnapshotFileInfo.Type.HFILE) .setHfile(path.toString()) .build(); long size; if (storeFile.hasFileSize()) { size = storeFile.getFileSize(); } else { size = HFileLink.buildFromHFileLinkPattern(conf, path).getFileStatus(fs).getLen(); } files.add(new Pair<>(fileInfo, size)); } } });
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile other =
      (org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile) obj;
  // Two messages are equal when field presence matches and every present
  // field compares equal; bail out at the first mismatch.
  if (hasName() != other.hasName()) {
    return false;
  }
  if (hasName() && !getName().equals(other.getName())) {
    return false;
  }
  if (hasReference() != other.hasReference()) {
    return false;
  }
  if (hasReference() && !getReference().equals(other.getReference())) {
    return false;
  }
  if (hasFileSize() != other.hasFileSize()) {
    return false;
  }
  if (hasFileSize() && getFileSize() != other.getFileSize()) {
    return false;
  }
  // Unknown fields must also agree for full wire-level equality.
  return unknownFields.equals(other.unknownFields);
}
@Override public void storeFile(final RegionInfo regionInfo, final String family, final SnapshotRegionManifest.StoreFile storeFile) throws IOException { // for storeFile.hasReference() case, copied as part of the manifest if (!storeFile.hasReference()) { String region = regionInfo.getEncodedName(); String hfile = storeFile.getName(); Path path = HFileLink.createPath(table, region, family, hfile); SnapshotFileInfo fileInfo = SnapshotFileInfo.newBuilder() .setType(SnapshotFileInfo.Type.HFILE) .setHfile(path.toString()) .build(); long size; if (storeFile.hasFileSize()) { size = storeFile.getFileSize(); } else { size = HFileLink.buildFromHFileLinkPattern(conf, path).getFileStatus(fs).getLen(); } files.add(new Pair<>(fileInfo, size)); } } });
@java.lang.Override
public int hashCode() {
  // hashCode is costly for protobuf messages, so it is cached after the
  // first computation; 0 doubles as the "not yet computed" marker.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int h = 41;
  h = (19 * h) + getDescriptor().hashCode();
  // Only fields that are present contribute, keeping the hash consistent
  // with equals(), which also skips absent fields.
  if (hasName()) {
    h = (37 * h) + NAME_FIELD_NUMBER;
    h = (53 * h) + getName().hashCode();
  }
  if (hasReference()) {
    h = (37 * h) + REFERENCE_FIELD_NUMBER;
    h = (53 * h) + getReference().hashCode();
  }
  if (hasFileSize()) {
    h = (37 * h) + FILE_SIZE_FIELD_NUMBER;
    h = (53 * h) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong(
        getFileSize());
  }
  h = (29 * h) + unknownFields.hashCode();
  memoizedHashCode = h;
  return h;
}
@Override public void storeFile(final RegionInfo regionInfo, final String family, final SnapshotRegionManifest.StoreFile storeFile) throws IOException { // for storeFile.hasReference() case, copied as part of the manifest if (!storeFile.hasReference()) { String region = regionInfo.getEncodedName(); String hfile = storeFile.getName(); Path path = HFileLink.createPath(table, region, family, hfile); SnapshotFileInfo fileInfo = SnapshotFileInfo.newBuilder() .setType(SnapshotFileInfo.Type.HFILE) .setHfile(path.toString()) .build(); long size; if (storeFile.hasFileSize()) { size = storeFile.getFileSize(); } else { size = HFileLink.buildFromHFileLinkPattern(conf, path).getFileStatus(fs).getLen(); } files.add(new Pair<>(fileInfo, size)); } } });
// Merges all set fields from {@code other} into this builder, following
// protobuf merge semantics: scalars are overwritten, sub-messages are
// merged recursively, and unknown fields are concatenated.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile other) {
  // Merging the default instance is a no-op.
  if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.getDefaultInstance()) return this;
  if (other.hasName()) {
    // Record the presence bit for 'name' and share the immutable value.
    bitField0_ |= 0x00000001;
    name_ = other.name_;
    onChanged();
  }
  if (other.hasReference()) {
    // Sub-message: merged field-by-field rather than replaced.
    mergeReference(other.getReference());
  }
  if (other.hasFileSize()) {
    setFileSize(other.getFileSize());
  }
  this.mergeUnknownFields(other.unknownFields);
  onChanged();
  return this;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile that =
      (org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile) obj;
  // Field-by-field comparison: presence flags must match, and each
  // present field must be equal. Short-circuit on the first difference.
  boolean sameName = hasName() == that.hasName()
      && (!hasName() || getName().equals(that.getName()));
  if (!sameName) {
    return false;
  }
  boolean sameReference = hasReference() == that.hasReference()
      && (!hasReference() || getReference().equals(that.getReference()));
  if (!sameReference) {
    return false;
  }
  boolean sameFileSize = hasFileSize() == that.hasFileSize()
      && (!hasFileSize() || getFileSize() == that.getFileSize());
  if (!sameFileSize) {
    return false;
  }
  // Finally, unknown wire-format fields must match as well.
  return unknownFields.equals(that.unknownFields);
}
@java.lang.Override
public int hashCode() {
  // Return the cached value when available; 0 means "not computed yet".
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int result = 41;
  result = (19 * result) + getDescriptor().hashCode();
  // Mix in each present field's tag number and value, mirroring the
  // presence checks performed by equals().
  if (hasName()) {
    result = (37 * result) + NAME_FIELD_NUMBER;
    result = (53 * result) + getName().hashCode();
  }
  if (hasReference()) {
    result = (37 * result) + REFERENCE_FIELD_NUMBER;
    result = (53 * result) + getReference().hashCode();
  }
  if (hasFileSize()) {
    result = (37 * result) + FILE_SIZE_FIELD_NUMBER;
    result = (53 * result) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong(
        getFileSize());
  }
  result = (29 * result) + unknownFields.hashCode();
  memoizedHashCode = result;
  return result;
}
// Merges the set fields of {@code other} into this builder using standard
// protobuf semantics (overwrite scalars, recursively merge sub-messages,
// append unknown fields). Returns {@code this} for chaining.
public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile other) {
  // Nothing to do when merging from the default (empty) instance.
  if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotRegionManifest.StoreFile.getDefaultInstance()) return this;
  if (other.hasName()) {
    // Set the 'name' presence bit and share the immutable string value.
    bitField0_ |= 0x00000001;
    name_ = other.name_;
    onChanged();
  }
  if (other.hasReference()) {
    // 'reference' is a message field, so it is merged, not replaced.
    mergeReference(other.getReference());
  }
  if (other.hasFileSize()) {
    setFileSize(other.getFileSize());
  }
  this.mergeUnknownFields(other.unknownFields);
  onChanged();
  return this;
}