/**
 * Builds a sample DATA {@link HFileBlock} whose payload is the serialized form of the
 * given KeyValues, leaving room for a v2 (checksum-bearing) block header at the front.
 *
 * @param kvs    key-values to serialize into the block
 * @param useTag whether the block context should declare tags as included
 * @return an uncompressed, checksum-less DATA block wrapping the serialized cells
 */
private HFileBlock getSampleHFileBlock(List<KeyValue> kvs, boolean useTag) {
  ByteBuffer serialized = RedundantKVGenerator.convertKvToByteBuffer(kvs, includesMemstoreTS);
  int dataSize = serialized.limit();
  // Reserve header space up front; HFileBlock.FILL_HEADER fills it in for us.
  ByteBuffer blockBuf = ByteBuffer.allocate(dataSize + HConstants.HFILEBLOCK_HEADER_SIZE);
  blockBuf.position(HConstants.HFILEBLOCK_HEADER_SIZE);
  serialized.rewind();
  blockBuf.put(serialized);
  HFileContext meta = new HFileContextBuilder()
      .withIncludesMvcc(includesMemstoreTS)
      .withIncludesTags(useTag)
      .withHBaseCheckSum(true)
      .withCompression(Algorithm.NONE)
      .withBlockSize(0)
      .withChecksumType(ChecksumType.NULL)
      .build();
  return new HFileBlock(BlockType.DATA, dataSize, dataSize, -1, blockBuf,
      HFileBlock.FILL_HEADER, 0, 0, -1, meta);
}
/**
 * Create multiple partition files: one mob file per key in {@code KEYS} under
 * {@code basePath}, each containing ten 5000-byte random values.
 *
 * @param basePath directory the mob files are written into
 * @throws IOException if writing any of the files fails
 */
private void createMobFile(Path basePath) throws IOException {
  HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
  Date today = new Date();
  // Fix: reuse a single Random instead of allocating a new one for every value.
  Random rng = new Random();
  int partition = 0;
  for (byte k0 : KEYS) {
    byte[] startRow = Bytes.toBytes(partition++);
    MobFileName mobFileName = MobFileName.create(startRow, MobUtils.formatDate(today), mobSuffix);
    StoreFileWriter mobFileWriter = new StoreFileWriter.Builder(conf, cacheConf, fs)
        .withFileContext(meta)
        .withFilePath(new Path(basePath, mobFileName.getFileName()))
        .build();
    long now = System.currentTimeMillis();
    try {
      for (int i = 0; i < 10; i++) {
        byte[] key = Bytes.add(Bytes.toBytes(k0), Bytes.toBytes(i));
        byte[] dummyData = new byte[5000];
        rng.nextBytes(dummyData);
        mobFileWriter.append(
          new KeyValue(key, Bytes.toBytes(family), Bytes.toBytes(qf), now, Type.Put, dummyData));
      }
    } finally {
      mobFileWriter.close();
    }
  }
}
private void createStoreFiles(Path basePath, String family, String qualifier, int count, Type type, boolean sameStartKey, final Date date) throws IOException { HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build(); String startKey = "row_"; MobFileName mobFileName = null; for (int i = 0; i < count; i++) { byte[] startRow; if (sameStartKey) { // When creating multiple files under one partition, suffix needs to be different. startRow = Bytes.toBytes(startKey); mobSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", ""); delSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "") + "_del"; } else { startRow = Bytes.toBytes(startKey + i); } if(type.equals(Type.Delete)) { mobFileName = MobFileName.create(startRow, MobUtils.formatDate(date), delSuffix); } if(type.equals(Type.Put)){ mobFileName = MobFileName.create(startRow, MobUtils.formatDate(date), mobSuffix); } StoreFileWriter mobFileWriter = new StoreFileWriter.Builder(conf, cacheConf, fs) .withFileContext(meta).withFilePath(new Path(basePath, mobFileName.getFileName())).build(); writeStoreFile(mobFileWriter, startRow, Bytes.toBytes(family), Bytes.toBytes(qualifier), type, (i+1)*1000); } }
private void testHeaderSizeInCacheWithoutChecksumInternals(boolean useTags) throws IOException { int headerSize = HConstants.HFILEBLOCK_HEADER_SIZE_NO_CHECKSUM; // Create some KVs and create the block with old-style header. List<KeyValue> kvs = generator.generateTestKeyValues(60, useTags); ByteBuffer keyValues = RedundantKVGenerator.convertKvToByteBuffer(kvs, includesMemstoreTS); int size = keyValues.limit(); ByteBuffer buf = ByteBuffer.allocate(size + headerSize); buf.position(headerSize); keyValues.rewind(); buf.put(keyValues); HFileContext hfileContext = new HFileContextBuilder().withHBaseCheckSum(false) .withIncludesMvcc(includesMemstoreTS) .withIncludesTags(useTags) .withBlockSize(0) .withChecksumType(ChecksumType.NULL) .build(); HFileBlock block = new HFileBlock(BlockType.DATA, size, size, -1, buf, HFileBlock.FILL_HEADER, 0, 0, -1, hfileContext); HFileBlock cacheBlock = createBlockOnDisk(kvs, block, useTags); assertEquals(headerSize, cacheBlock.getDummyHeaderForVersion().length); }
/**
 * Writes one additional (metadata-only) store file into the directory of the store's
 * existing files, stamped with a sequence id one past the store's current maximum.
 *
 * @throws IOException if the file cannot be written
 */
private void addStoreFile() throws IOException {
  HStoreFile existing = this.store.getStorefiles().iterator().next();
  Path storeDir = existing.getPath().getParent();
  long maxSeqId = this.store.getMaxSequenceId().orElse(0L);
  Configuration c = TEST_UTIL.getConfiguration();
  FileSystem fs = FileSystem.get(c);
  HFileContext fileContext = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL).build();
  StoreFileWriter writer = new StoreFileWriter.Builder(c, new CacheConfig(c), fs)
      .withOutputDir(storeDir)
      .withFileContext(fileContext)
      .build();
  writer.appendMetadata(maxSeqId + 1, false);
  writer.close();
  LOG.info("Added store file:" + writer.getPath());
}
private void metablocks(final String compress) throws Exception { Path mFile = new Path(ROOT_DIR, "meta.hfile"); FSDataOutputStream fout = createFSOutput(mFile); HFileContext meta = new HFileContextBuilder() .withCompression(HFileWriterImpl.compressionByName(compress)) .withBlockSize(minBlockSize).build(); Writer writer = HFile.getWriterFactory(conf, cacheConf) .withOutputStream(fout) .withFileContext(meta) .create(); someTestingWithMetaBlock(writer); writer.close(); fout.close(); FSDataInputStream fin = fs.open(mFile); Reader reader = HFile.createReaderFromStream(mFile, fs.open(mFile), this.fs.getFileStatus(mFile).getLen(), cacheConf, conf); reader.loadFileInfo(); // No data -- this should return false. assertFalse(reader.getScanner(false, false).seekTo()); someReadingWithMetaBlock(reader); fs.delete(mFile, true); reader.close(); fin.close(); }
/**
 * For every supported compression algorithm, writes an HFile with no meta blocks and
 * asserts that looking up a non-existent meta block returns null.
 * <p>
 * Fix: the reader is now closed after each iteration; the old code leaked one open
 * reader (and its underlying stream) per compression algorithm.
 */
@Test
public void testNullMetaBlocks() throws Exception {
  for (Compression.Algorithm compressAlgo : HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS) {
    Path mFile = new Path(ROOT_DIR, "nometa_" + compressAlgo + ".hfile");
    FSDataOutputStream fout = createFSOutput(mFile);
    HFileContext meta = new HFileContextBuilder().withCompression(compressAlgo)
        .withBlockSize(minBlockSize).build();
    Writer writer = HFile.getWriterFactory(conf, cacheConf)
        .withOutputStream(fout)
        .withFileContext(meta)
        .create();
    KeyValue kv = new KeyValue("foo".getBytes(), "f1".getBytes(), null, "value".getBytes());
    writer.append(kv);
    writer.close();
    fout.close();
    Reader reader = HFile.createReader(fs, mFile, cacheConf, true, conf);
    try {
      reader.loadFileInfo();
      assertNull(reader.getMetaBlock("non-existant", false));
    } finally {
      reader.close();
    }
  }
}
/**
 * Writes a store file with 1000 randomly generated cells (32-byte rows, random
 * family/qualifier splits and key types) and returns its path.
 *
 * @return path of the written store file
 * @throws IOException if the file cannot be written
 */
private Path writeStoreFile() throws IOException {
  Path parentDir = new Path(TEST_UTIL.getDataTestDir(), "TestHFile");
  HFileContext meta = new HFileContextBuilder().withBlockSize(64 * 1024).build();
  StoreFileWriter sfw = new StoreFileWriter.Builder(conf, fs)
      .withOutputDir(parentDir)
      .withComparator(CellComparatorImpl.COMPARATOR)
      .withFileContext(meta)
      .build();
  final int rowLen = 32;
  Random rng = new Random();
  for (int i = 0; i < 1000; ++i) {
    byte[] k = RandomKeyValueUtil.randomOrderedKey(rng, i);
    byte[] v = RandomKeyValueUtil.randomValue(rng);
    // Split the key bytes: [0,rowLen) = row, [rowLen,rowLen+cfLen) = family, rest = qualifier.
    int cfLen = rng.nextInt(k.length - rowLen + 1);
    KeyValue kv = new KeyValue(k, 0, rowLen, k, rowLen, cfLen, k, rowLen + cfLen,
        k.length - rowLen - cfLen, rng.nextLong(), generateKeyType(rng), v, 0, v.length);
    sfw.append(kv);
  }
  sfw.close();
  return sfw.getPath();
}
/**
 * Prepares the HFile writer for the benchmark, optionally wrapping it in an AES
 * encryption context when {@code cipher} is "aes".
 * <p>
 * Fix: strings are compared with {@code equals()}; the old {@code cipher == "aes"}
 * reference comparison only worked when both sides happened to be interned literals.
 *
 * @throws IOException if an unsupported cipher is configured
 */
@Override
void setUp() throws Exception {
  HFileContextBuilder builder = new HFileContextBuilder()
      .withCompression(HFileWriterImpl.compressionByName(codec))
      .withBlockSize(RFILE_BLOCKSIZE);
  if ("aes".equals(cipher)) {
    byte[] cipherKey = new byte[AES.KEY_LENGTH];
    new SecureRandom().nextBytes(cipherKey);
    builder.withEncryptionContext(Encryption.newContext(conf)
        .setCipher(Encryption.getCipher(conf, cipher))
        .setKey(cipherKey));
  } else if (!"none".equals(cipher)) {
    throw new IOException("Cipher " + cipher + " not supported.");
  }
  HFileContext hFileContext = builder.build();
  writer = HFile.getWriterFactoryNoCache(conf)
      .withPath(fs, mf)
      .withFileContext(hFileContext)
      .withComparator(CellComparator.getInstance())
      .create();
}
@Test public void testBloomFilter() throws Exception { FileSystem fs = FileSystem.getLocal(conf); conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, (float) 0.01); conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true); // write the file Path f = new Path(ROOT_DIR, getName()); HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL) .withChecksumType(CKTYPE) .withBytesPerCheckSum(CKBYTES).build(); // Make a store file and write data to it. StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs) .withFilePath(f) .withBloomType(BloomType.ROW) .withMaxKeyCount(2000) .withFileContext(meta) .build(); bloomWriteRead(writer, fs); }
/**
 * Builds the HFileContext for new store files from the store's family/checksum
 * configuration plus the supplied per-write options.
 *
 * @param compression         codec to use; null falls back to the HFile default
 * @param includeMVCCReadpoint whether MVCC read points are written
 * @param includesTag         whether cell tags are written
 * @param cryptoContext       encryption context for the file
 * @return the assembled file context
 */
private HFileContext createFileContext(Compression.Algorithm compression,
    boolean includeMVCCReadpoint, boolean includesTag, Encryption.Context cryptoContext) {
  Compression.Algorithm effectiveCompression =
      (compression != null) ? compression : HFile.DEFAULT_COMPRESSION_ALGORITHM;
  return new HFileContextBuilder()
      .withIncludesMvcc(includeMVCCReadpoint)
      .withIncludesTags(includesTag)
      .withCompression(effectiveCompression)
      .withCompressTags(family.isCompressTags())
      .withChecksumType(checksumType)
      .withBytesPerCheckSum(bytesPerChecksum)
      .withBlockSize(blocksize)
      .withHBaseCheckSum(true)
      .withDataBlockEncoding(family.getDataBlockEncoding())
      .withEncryptionContext(cryptoContext)
      .withCreateTime(EnvironmentEdgeManager.currentTime())
      .build();
}
/** * Test for HBASE-8012 */ @Test public void testReseek() throws Exception { // write the file Path f = new Path(ROOT_DIR, getName()); HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build(); // Make a store file and write data to it. StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs) .withFilePath(f) .withFileContext(meta) .build(); writeStoreFile(writer); writer.close(); StoreFileReader reader = new StoreFileReader(fs, f, cacheConf, true, new AtomicInteger(0), true, conf); // Now do reseek with empty KV to position to the beginning of the file KeyValue k = KeyValueUtil.createFirstOnRow(HConstants.EMPTY_BYTE_ARRAY); StoreFileScanner s = getStoreFileScanner(reader, false, false); s.reseek(k); assertNotNull("Intial reseek should position at the beginning of the file", s.peek()); }
/** * Create an HFile with the given number of rows with a specified value. */ public static void createHFile(FileSystem fs, Path path, byte[] family, byte[] qualifier, byte[] value, int numRows) throws IOException { HFileContext context = new HFileContextBuilder().withBlockSize(BLOCKSIZE) .withCompression(COMPRESSION) .build(); HFile.Writer writer = HFile .getWriterFactory(conf, new CacheConfig(conf)) .withPath(fs, path) .withFileContext(context) .create(); long now = System.currentTimeMillis(); try { // subtract 2 since iterateOnSplits doesn't include boundary keys for (int i = 0; i < numRows; i++) { KeyValue kv = new KeyValue(rowkey(i), family, qualifier, now, value); writer.append(kv); } writer.appendFileInfo(BULKLOAD_TIME_KEY, Bytes.toBytes(now)); } finally { writer.close(); } }
/**
 * Write a file and then assert that we can read from top and bottom halves
 * using two HalfMapFiles.
 * @throws Exception on write/read failure
 */
@Test
public void testBasicHalfMapFile() throws Exception {
  final HRegionInfo hri = new HRegionInfo(TableName.valueOf("testBasicHalfMapFileTb"));
  HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
      conf, fs, new Path(testDir, hri.getTable().getNameAsString()), hri);
  HFileContext meta = new HFileContextBuilder().withBlockSize(2 * 1024).build();
  StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
      .withFilePath(regionFs.createTempName())
      .withFileContext(meta)
      .build();
  writeStoreFile(writer);
  Path committedPath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
  HStoreFile storeFile =
      new HStoreFile(this.fs, committedPath, conf, cacheConf, BloomType.NONE, true);
  checkHalfHFile(regionFs, storeFile);
}
/** * Test encoding with offheap keyvalue. This test just verifies if the encoders * work with DBB and does not use the getXXXArray() API * @throws IOException */ @Test public void testEncodingWithOffheapKeyValue() throws IOException { // usually we have just block without headers, but don't complicate that try { List<Cell> kvs = generator.generateTestExtendedOffheapKeyValues(60, true); HFileContext meta = new HFileContextBuilder().withIncludesMvcc(includesMemstoreTS) .withIncludesTags(true).withHBaseCheckSum(true).withCompression(Algorithm.NONE) .withBlockSize(0).withChecksumType(ChecksumType.NULL).build(); writeBlock(kvs, meta, true); } catch (IllegalArgumentException e) { fail("No exception should have been thrown"); } }
/**
 * Writes a mob store file, wraps it in a MobFile, and verifies that a scanner can be
 * obtained and is a StoreFileScanner.
 */
@Test
public void testGetScanner() throws Exception {
  Path testDir = TEST_UTIL.getDataTestDir();
  FileSystem fs = testDir.getFileSystem(conf);
  HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
  StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs)
      .withOutputDir(testDir)
      .withFileContext(meta)
      .build();
  MobTestUtil.writeStoreFile(writer, testName.getMethodName());
  MobFile mobFile =
      new MobFile(new HStoreFile(fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true));
  assertNotNull(mobFile.getScanner());
  assertTrue(mobFile.getScanner() instanceof StoreFileScanner);
}
}
Path makeNewFile(TagUsage tagUsage) throws IOException { Path ncTFile = new Path(TEST_UTIL.getDataTestDir(), "basic.hfile"); FSDataOutputStream fout = TEST_UTIL.getTestFileSystem().create(ncTFile); int blocksize = toKV("a", tagUsage).getLength() * 3; HFileContext context = new HFileContextBuilder().withBlockSize(blocksize) .withDataBlockEncoding(encoding) .withIncludesTags(true).build(); Configuration conf = TEST_UTIL.getConfiguration(); HFile.Writer writer = HFile.getWriterFactoryNoCache(conf).withOutputStream(fout) .withFileContext(context) .withComparator(CellComparatorImpl.COMPARATOR).create(); // 4 bytes * 3 * 2 for each key/value + // 3 for keys, 15 for values = 42 (woot) writer.append(toKV("c", tagUsage)); writer.append(toKV("e", tagUsage)); writer.append(toKV("g", tagUsage)); // block transition writer.append(toKV("i", tagUsage)); writer.append(toKV("k", tagUsage)); writer.close(); fout.close(); return ncTFile; }
/**
 * Verifies CachedMobFile ordering: files with fewer accesses compare greater, and a
 * file compares equal to itself.
 */
@SuppressWarnings("SelfComparison")
@Test
public void testCompare() throws Exception {
  String caseName = testName.getMethodName();
  Path testDir = TEST_UTIL.getDataTestDir();
  FileSystem fs = testDir.getFileSystem(conf);
  HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
  Path outputDir1 = new Path(testDir, FAMILY1);
  StoreFileWriter writer1 = new StoreFileWriter.Builder(conf, cacheConf, fs)
      .withOutputDir(outputDir1)
      .withFileContext(meta)
      .build();
  MobTestUtil.writeStoreFile(writer1, caseName);
  CachedMobFile cachedMobFile1 = CachedMobFile.create(fs, writer1.getPath(), conf, cacheConf);
  Path outputDir2 = new Path(testDir, FAMILY2);
  StoreFileWriter writer2 = new StoreFileWriter.Builder(conf, cacheConf, fs)
      .withOutputDir(outputDir2)
      .withFileContext(meta)
      .build();
  MobTestUtil.writeStoreFile(writer2, caseName);
  CachedMobFile cachedMobFile2 = CachedMobFile.create(fs, writer2.getPath(), conf, cacheConf);
  cachedMobFile1.access(1);
  cachedMobFile2.access(2);
  // Less-accessed file sorts after the more-accessed one.
  assertEquals(1, cachedMobFile1.compareTo(cachedMobFile2));
  assertEquals(-1, cachedMobFile2.compareTo(cachedMobFile1));
  assertEquals(0, cachedMobFile1.compareTo(cachedMobFile1));
}
@Test public void testStoreFileReference() throws Exception { final RegionInfo hri = RegionInfoBuilder.newBuilder(TableName.valueOf("testStoreFileReference")).build(); HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(conf, fs, new Path(testDir, hri.getTable().getNameAsString()), hri); HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build(); // Make a store file and write data to it. StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs) .withFilePath(regionFs.createTempName()).withFileContext(meta).build(); writeStoreFile(writer); Path hsfPath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath()); writer.close(); HStoreFile file = new HStoreFile(this.fs, hsfPath, conf, cacheConf, BloomType.NONE, true); file.initReader(); StoreFileReader r = file.getReader(); assertNotNull(r); StoreFileScanner scanner = new StoreFileScanner(r, mock(HFileScanner.class), false, false, 0, 0, false); // Verify after instantiating scanner refCount is increased assertTrue("Verify file is being referenced", file.isReferencedInReads()); scanner.close(); // Verify after closing scanner refCount is decreased assertFalse("Verify file is not being referenced", file.isReferencedInReads()); }
/**
 * Verifies CachedMobFile reference counting: each open() increments the count and
 * each close() decrements it, starting and ending at zero.
 */
@Test
public void testOpenClose() throws Exception {
  String caseName = testName.getMethodName();
  Path testDir = TEST_UTIL.getDataTestDir();
  FileSystem fs = testDir.getFileSystem(conf);
  HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
  StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs)
      .withOutputDir(testDir)
      .withFileContext(meta)
      .build();
  MobTestUtil.writeStoreFile(writer, caseName);
  CachedMobFile cachedMobFile = CachedMobFile.create(fs, writer.getPath(), conf, cacheConf);
  assertEquals(EXPECTED_REFERENCE_ZERO, cachedMobFile.getReferenceCount());
  cachedMobFile.open();
  assertEquals(EXPECTED_REFERENCE_ONE, cachedMobFile.getReferenceCount());
  cachedMobFile.open();
  assertEquals(EXPECTED_REFERENCE_TWO, cachedMobFile.getReferenceCount());
  cachedMobFile.close();
  assertEquals(EXPECTED_REFERENCE_ONE, cachedMobFile.getReferenceCount());
  cachedMobFile.close();
  assertEquals(EXPECTED_REFERENCE_ZERO, cachedMobFile.getReferenceCount());
}