/**
 * Recalculates the total size of this compaction from the current file set.
 * Files whose reader is unavailable contribute zero bytes.
 */
private void recalculateSize() {
  long sum = 0L;
  for (HStoreFile sf : filesToCompact) {
    StoreFileReader reader = sf.getReader();
    if (reader != null) {
      sum += reader.length();
    }
  }
  this.totalSize = sum;
}
}
/**
 * Find the total size of a list of store files.
 * @param potentialMatchFiles StoreFile list.
 * @return Sum of StoreFile.getReader().length(); a file whose reader is null
 *         (e.g. a compacted file whose reader has already been closed) counts
 *         as 0 rather than throwing an NPE, matching how the other size
 *         helpers in this class treat missing readers.
 */
private long getTotalStoreSize(List<HStoreFile> potentialMatchFiles) {
  return potentialMatchFiles.stream().map(HStoreFile::getReader)
    .mapToLong(r -> r != null ? r.length() : 0L).sum();
}
/**
 * Computes the combined on-disk size of the given store files.
 * @param candidates store files to sum over
 * @return total of every file's reader length, in bytes
 */
public static long getTotalFileSize(final Collection<HStoreFile> candidates) {
  return candidates.stream().mapToLong(sf -> sf.getReader().length()).sum();
}
/**
 * Gets the total size of all files in the stripe.
 * @param stripeIndex Stripe index.
 * @return Combined reader length of the stripe's files, in bytes.
 */
private long getStripeFilesSize(int stripeIndex) {
  return state.stripeFiles.get(stripeIndex).stream()
    .mapToLong(sf -> sf.getReader().length()).sum();
}
/** Sums the reader lengths of the given store files. */
private long getTotalSize(Collection<HStoreFile> sfs) {
  long total = 0L;
  for (HStoreFile sf : sfs) {
    total += sf.getReader().length();
  }
  return total;
}
/**
 * Gets the largest file (with reader) out of the list of files.
 * @param candidates The files to choose from.
 * @return The largest file; an empty Optional if no file has a reader.
 */
static Optional<HStoreFile> getLargestFile(Collection<HStoreFile> candidates) {
  return candidates.stream().filter(f -> f.getReader() != null)
    .max(Comparator.comparingLong(f -> f.getReader().length()));
}
/**
 * Sums the reader lengths of the files that match the given predicate.
 * Files that are null or have no open reader are skipped entirely, so the
 * predicate is never invoked on them.
 */
private long getStorefilesSize(Collection<HStoreFile> files, Predicate<HStoreFile> predicate) {
  long total = 0L;
  for (HStoreFile f : files) {
    if (f != null && f.getReader() != null && predicate.test(f)) {
      total += f.getReader().length();
    }
  }
  return total;
}
/**
 * Human-readable summary of this compaction request: region, store, file
 * count/sizes (individual sizes listed only for files with an open reader),
 * priority and selection time.
 */
@Override
public String toString() {
  String fsList = filesToCompact.stream().filter(f -> f.getReader() != null)
    .map(f -> TraditionalBinaryPrefix.long2String(f.getReader().length(), "", 1))
    .collect(Collectors.joining(", "));
  StringBuilder sb = new StringBuilder();
  sb.append("regionName=").append(regionName);
  sb.append(", storeName=").append(storeName);
  sb.append(", fileCount=").append(this.getFiles().size());
  sb.append(", fileSize=").append(TraditionalBinaryPrefix.long2String(totalSize, "", 1));
  if (!fsList.isEmpty()) {
    sb.append(" (").append(fsList).append(")");
  }
  sb.append(", priority=").append(priority);
  sb.append(", time=").append(selectionTime);
  return sb.toString();
}
@Override public Long apply(HStoreFile sf) { if (sf.getReader() != null) { return sf.getReader().length(); } else { // the reader may be null for the compacted files and if the archiving // had failed. return -1L; } } }
/**
 * @param candidates pre-filtrate
 * @return filtered subset exclude all files above maxCompactSize
 *   Also save all references. We MUST compact them
 */
protected ArrayList<HStoreFile> skipLargeFiles(ArrayList<HStoreFile> candidates,
    boolean mayUseOffpeak) {
  // Candidates are sorted; count the leading run of oversized, non-reference
  // files and drop them. References are never skipped.
  int skipped = 0;
  for (HStoreFile sf : candidates) {
    if (sf.isReference()
      || sf.getReader().length() <= comConf.getMaxCompactSize(mayUseOffpeak)) {
      break;
    }
    ++skipped;
  }
  if (skipped > 0) {
    LOG.debug("Some files are too large. Excluding " + skipped
      + " files from compaction candidates");
    candidates.subList(0, skipped).clear();
  }
  return candidates;
}
/**
 * Check that all files satisfy the constraint
 * FileSize(i) <= ( Sum(0,N,FileSize(_)) - FileSize(i) ) * Ratio.
 *
 * @param files List of store files to consider as a compaction candidate.
 * @param currentRatio The ratio to use.
 * @return true if every file's size is within the ratio of the combined size
 *         of all the other files; trivially true for fewer than two files.
 */
private boolean filesInRatio(List<HStoreFile> files, double currentRatio) {
  if (files.size() < 2) {
    return true;
  }
  final long totalFileSize = getTotalStoreSize(files);
  return files.stream().noneMatch(file -> {
    long singleFileSize = file.getReader().length();
    long sumAllOtherFileSizes = totalFileSize - singleFileSize;
    return singleFileSize > sumAllOtherFileSizes * currentRatio;
  });
}
}
/** Returns the reader lengths of the given files, in list order. */
long[] getSizes(List<HStoreFile> sfList) {
  return sfList.stream().mapToLong(sf -> sf.getReader().length()).toArray();
}
/**
 * Registers an already-written HFile (from a bulk load) with this store:
 * updates the size counters, then inserts the file into the store file
 * manager under the write lock.
 * @param sf the bulk-loaded store file; assumed to already have an open
 *           reader — TODO confirm callers guarantee this (r is dereferenced
 *           without a null check)
 * @throws IOException if inserting the file into the store file manager fails
 */
private void bulkLoadHFile(HStoreFile sf) throws IOException {
  StoreFileReader r = sf.getReader();
  // Account for the new file's on-disk and uncompressed sizes before
  // publishing it to readers.
  this.storeSize.addAndGet(r.length());
  this.totalUncompressedBytes.addAndGet(r.getTotalUncompressedBytes());
  // Append the new storefile into the list
  this.lock.writeLock().lock();
  try {
    this.storeEngine.getStoreFileManager().insertNewFiles(Lists.newArrayList(sf));
  } finally {
    // We need the lock, as long as we are updating the storeFiles
    // or changing the memstore. Let us release it before calling
    // notifyChangeReadersObservers. See HBASE-4485 for a possible
    // deadlock scenario that could have happened if continue to hold
    // the lock.
    this.lock.writeLock().unlock();
  }
  LOG.info("Loaded HFile " + sf.getFileInfo() + " into store '" + getColumnFamilyName());
  if (LOG.isTraceEnabled()) {
    // CSV-ish trace record: time, file size, total store size, file count.
    String traceMessage = "BULK LOAD time,size,store size,store files ["
      + EnvironmentEdgeManager.currentTime() + "," + r.length() + "," + storeSize + ","
      + storeEngine.getStoreFileManager().getStorefileCount() + "]";
    LOG.trace(traceMessage);
  }
}
/** Builds a mock store file named "file" whose reader reports a 10-byte length. */
private static HStoreFile createFile() throws Exception {
  StoreFileReader reader = mock(StoreFileReader.class);
  when(reader.length()).thenReturn(10L);
  HStoreFile storeFile = mock(HStoreFile.class);
  when(storeFile.getPath()).thenReturn(new Path("file"));
  when(storeFile.getReader()).thenReturn(reader);
  return storeFile;
}
/** Builds a mock HFile-backed store file whose reader reports the given length. */
private HStoreFile mockStoreFileWithLength(long length) {
  StoreFileReader reader = mock(StoreFileReader.class);
  when(reader.length()).thenReturn(length);
  HStoreFile storeFile = mock(HStoreFile.class);
  when(storeFile.isHFile()).thenReturn(true);
  when(storeFile.getReader()).thenReturn(reader);
  return storeFile;
}
/**
 * Runs one compaction-selection iteration: asks the policy for a compaction
 * over the given files, replaces the selected files with a single mock file
 * of their combined size, and records the bytes "written".
 */
private List<HStoreFile> runIteration(List<HStoreFile> startingStoreFiles) throws IOException {
  List<HStoreFile> storeFiles = new ArrayList<>(startingStoreFiles);
  CompactionRequestImpl req =
    cp.selectCompaction(storeFiles, new ArrayList<>(), false, false, false);
  Collection<HStoreFile> filesToCompact = req.getFiles();
  long newFileSize = 0;
  if (!filesToCompact.isEmpty()) {
    storeFiles = new ArrayList<>(storeFiles);
    storeFiles.removeAll(filesToCompact);
    newFileSize = filesToCompact.stream().mapToLong(sf -> sf.getReader().length()).sum();
    storeFiles.add(createMockStoreFileBytes(newFileSize));
  }
  written += newFileSize;
  return storeFiles;
}
/**
 * Create a mock StoreFile with the given attributes.
 */
private HStoreFile mockStoreFile(boolean bulkLoad, long size, long bulkTimestamp, long seqId,
    String path) {
  StoreFileReader mockReader = Mockito.mock(StoreFileReader.class);
  Mockito.doReturn(size).when(mockReader).length();
  HStoreFile sf = Mockito.mock(HStoreFile.class);
  Mockito.doReturn(mockReader).when(sf).getReader();
  Mockito.doReturn(bulkLoad).when(sf).isBulkLoadResult();
  Mockito.doReturn(OptionalLong.of(bulkTimestamp)).when(sf).getBulkLoadTimestamp();
  Mockito.doReturn(seqId).when(sf).getMaxSequenceId();
  Mockito.doReturn(new Path(path)).when(sf).getPath();
  // doReturn (not when/thenReturn) is required to stub toString() in Mockito.
  String description = "mock storefile, bulkLoad=" + bulkLoad + " bulkTimestamp=" + bulkTimestamp
    + " seqId=" + seqId + " path=" + path;
  Mockito.doReturn(description).when(sf).toString();
  return sf;
}
public static HStoreFile createDummyStoreFile(long maxSequenceId) throws Exception { // "Files" are totally unused, it's Scanner class below that gives compactor fake KVs. // But compaction depends on everything under the sun, so stub everything with dummies. HStoreFile sf = mock(HStoreFile.class); StoreFileReader r = mock(StoreFileReader.class); when(r.length()).thenReturn(1L); when(r.getBloomFilterType()).thenReturn(BloomType.NONE); when(r.getHFileReader()).thenReturn(mock(HFile.Reader.class)); when(r.getStoreFileScanner(anyBoolean(), anyBoolean(), anyBoolean(), anyLong(), anyLong(), anyBoolean())).thenReturn(mock(StoreFileScanner.class)); when(sf.getReader()).thenReturn(r); when(sf.getMaxSequenceId()).thenReturn(maxSequenceId); return sf; }
/**
 * Builds a mock store file at path "moo" whose reader reports {@code size}
 * for both entry count and length, with the remaining reader accessors
 * stubbed out for compaction-policy tests.
 */
private static HStoreFile createFile(long size) throws Exception {
  StoreFileReader r = mock(StoreFileReader.class);
  when(r.getEntries()).thenReturn(size);
  when(r.length()).thenReturn(size);
  when(r.getBloomFilterType()).thenReturn(BloomType.NONE);
  when(r.getHFileReader()).thenReturn(mock(HFile.Reader.class));
  when(r.getStoreFileScanner(anyBoolean(), anyBoolean(), anyBoolean(), anyLong(), anyLong(),
    anyBoolean())).thenReturn(mock(StoreFileScanner.class));
  HStoreFile sf = mock(HStoreFile.class);
  when(sf.getPath()).thenReturn(new Path("moo"));
  when(sf.getReader()).thenReturn(r);
  when(sf.getBulkLoadTimestamp()).thenReturn(OptionalLong.empty());
  return sf;
}
protected HStoreFile createMockStoreFile(final long sizeInBytes, final long seqId) { HStoreFile mockSf = mock(HStoreFile.class); StoreFileReader reader = mock(StoreFileReader.class); String stringPath = "/hbase/testTable/regionA/" + RandomStringUtils.random(FILENAME_LENGTH, 0, 0, true, true, null, random); Path path = new Path(stringPath); when(reader.getSequenceID()).thenReturn(seqId); when(reader.getTotalUncompressedBytes()).thenReturn(sizeInBytes); when(reader.length()).thenReturn(sizeInBytes); when(mockSf.getPath()).thenReturn(path); when(mockSf.excludeFromMinorCompaction()).thenReturn(false); when(mockSf.isReference()).thenReturn(false); // TODO come back to // this when selection takes this into account when(mockSf.getReader()).thenReturn(reader); String toString = MoreObjects.toStringHelper("MockStoreFile") .add("isReference", false) .add("fileSize", StringUtils.humanReadableInt(sizeInBytes)) .add("seqId", seqId) .add("path", stringPath).toString(); when(mockSf.toString()).thenReturn(toString); return mockSf; } }