/**
 * Opens a spill file and returns a streaming iterator over the entries it contains.
 * The file is read through an LZ4 block decompressor and parsed lazily by Jackson.
 *
 * @param file     spill file previously written by this grouper's spill mapper
 * @param keyClazz concrete key type used to resolve the parametric {@code Entry} type
 * @return an iterator over the deserialized entries
 * @throws RuntimeException wrapping any {@link IOException} raised while opening or parsing
 */
private MappingIterator<Entry<KeyType>> read(final File file, final Class<KeyType> keyClazz)
{
  try {
    return spillMapper.readValues(
        spillMapper.getFactory().createParser(new LZ4BlockInputStream(new FileInputStream(file))),
        spillMapper.getTypeFactory().constructParametricType(Entry.class, keyClazz)
    );
  }
  catch (IOException e) {
    // Throwables.propagate is deprecated in Guava; wrap explicitly instead
    // (same convention as mergeAndGetDictionary in this file).
    throw new RuntimeException(e);
  }
}
/**
 * Parses LZ4-block-compressed file-source data using protobuf's default size limit.
 * When the payload is too large for that limit, falls back to
 * {@code decodeHugeSourceData}, which raises the limit before parsing.
 *
 * @param binaryData LZ4-block-compressed serialized {@code DbFileSources.Data}
 * @return the decoded message
 * @throws IOException if decompression or parsing fails
 */
private static DbFileSources.Data decodeRegularSourceData(byte[] binaryData) throws IOException {
  try (LZ4BlockInputStream decompressed = new LZ4BlockInputStream(new ByteArrayInputStream(binaryData))) {
    return DbFileSources.Data.parseFrom(decompressed);
  } catch (InvalidProtocolBufferException e) {
    // Protobuf signals the size-limit condition only through the exception message.
    boolean sizeLimitExceeded = SIZE_LIMIT_EXCEEDED_EXCEPTION_MESSAGE.equals(e.getMessage());
    if (!sizeLimitExceeded) {
      throw e;
    }
    return decodeHugeSourceData(binaryData);
  }
}
/**
 * Returns a dictionary of string keys added to this grouper. The keySerde dictionary is
 * spilled to local storage whenever the inner grouper is spilled; any spilled dictionary
 * files are loaded back here and merged with the in-memory dictionary.
 *
 * @return a list of unique dictionary strings
 */
public List<String> mergeAndGetDictionary()
{
  // Seed the merge with the in-memory dictionary, then fold in each spilled file.
  final Set<String> merged = new HashSet<>(keySerde.getDictionary());
  for (File dictFile : dictionaryFiles) {
    try (
        final MappingIterator<String> entries = spillMapper.readValues(
            spillMapper.getFactory().createParser(new LZ4BlockInputStream(new FileInputStream(dictFile))),
            spillMapper.getTypeFactory().constructType(String.class)
        )
    ) {
      while (entries.hasNext()) {
        merged.add(entries.next());
      }
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }
  return new ArrayList<>(merged);
}
/**
 * Decodes file-source data that exceeds protobuf's default size limit by parsing
 * through a {@code CodedInputStream} whose limit is raised to {@code Integer.MAX_VALUE}.
 *
 * @param binaryData LZ4-block-compressed serialized {@code DbFileSources.Data}
 * @return the decoded message
 * @throws IOException if decompression or parsing fails
 */
private static DbFileSources.Data decodeHugeSourceData(byte[] binaryData) throws IOException {
  try (LZ4BlockInputStream decompressed = new LZ4BlockInputStream(new ByteArrayInputStream(binaryData))) {
    final CodedInputStream coded = CodedInputStream.newInstance(decompressed);
    coded.setSizeLimit(Integer.MAX_VALUE);
    return DbFileSources.Data.parseFrom(coded);
  }
}
// Wires up the stream pipeline: raw bytes -> LZ4 compression -> LZ4 decompression,
// so reads from decompressingStream round-trip back to the original uncompressedData.
// NOTE(review): the three stream fields are declared elsewhere in the class; this method
// overwrites them without closing previous instances — confirm callers handle lifecycle.
private void initializeStreams(byte[] uncompressedData) throws IOException {
  uncompressedStream = new ByteArrayInputStream(uncompressedData);
  compressingStream = new LZ4CompressingInputStream(uncompressedStream);
  decompressingStream = new LZ4BlockInputStream(compressingStream);
}
/**
 * Loads the raw compressed streams from the parent and wraps each in an LZ4 block
 * decompressor. {@code Maps.transformValues} returns a live view, so a fresh
 * decompressing wrapper is created each time a value is accessed.
 */
@Override
public Map<Long, InputStream> loadStreams(Transaction t, Set<Long> ids) {
  final Map<Long, InputStream> compressedStreams = super.loadStreams(t, ids);
  return Maps.transformValues(compressedStreams, LZ4BlockInputStream::new);
}
/**
 * Streams the stored object identified by {@code id} through an LZ4 block decompressor
 * into the given file. All three streams — including the caller-supplied
 * {@code FileOutputStream} — are closed when the copy completes or fails.
 */
@Override
protected void tryWriteStreamToFile(Transaction transaction, Long id, StreamMetadata metadata, FileOutputStream fos)
    throws IOException {
  try (InputStream compressed = makeStreamUsingTransaction(transaction, id, metadata);
       InputStream decompressed = new LZ4BlockInputStream(compressed);
       OutputStream target = fos) {
    ByteStreams.copy(decompressed, target);
  }
}
/**
 * Loads the compressed stream for {@code id} from the parent store and wraps it
 * for transparent LZ4 block decompression.
 */
@Override
public InputStream loadStream(Transaction t, final Long id) {
  final InputStream compressed = super.loadStream(t, id);
  return new LZ4BlockInputStream(compressed);
}
/**
 * Wraps the given stream so that reads yield the LZ4-block-decompressed bytes.
 *
 * @param in the compressed source stream
 * @return a decompressing view over {@code in}
 */
@Override
public InputStream getInputStream(final InputStream in) throws IOException {
  final InputStream decompressing = new LZ4BlockInputStream(in);
  return decompressing;
}
/**
 * Decodes a Base64 string and LZ4-block-decompresses the resulting bytes.
 *
 * @param zippedBase64Str Base64-encoded, LZ4-block-compressed payload
 * @return the decompressed bytes
 * @throws IOException if decompression fails
 */
public static byte[] uncompressString(String zippedBase64Str) throws IOException {
    byte[] bytes = Base64.decodeBase64(zippedBase64Str);
    // try-with-resources replaces the manual try/finally and the
    // deprecated IOUtils.closeQuietly(InputStream).
    try (InputStream zi = new LZ4BlockInputStream(new ByteArrayInputStream(bytes))) {
        return IOUtils.toByteArray(zi);
    }
}
/**
 * LZ4-block-decompresses {@code src} into {@code dst}, creating the destination's
 * parent directories if needed.
 *
 * @param src LZ4-block-compressed input file
 * @param dst destination file for the decompressed bytes
 * @throws IOException if the directories cannot be created, or reading,
 *                     decompressing, or writing fails
 */
public static void decompressFile(File src, File dst) throws IOException {
    File parent = dst.getParentFile();
    // Original NPE'd on a null parent and silently ignored a failed mkdirs.
    if (parent != null && !parent.exists() && !parent.mkdirs()) {
        throw new IOException("Unable to create directory " + parent);
    }
    // try-with-resources closes every stream even when the copy throws;
    // the original leaked all three streams on an IOException from IOUtils.copy.
    try (FileInputStream fis = new FileInputStream(src);
         LZ4BlockInputStream is = new LZ4BlockInputStream(fis, lz4Decompressor);
         FileOutputStream fos = new FileOutputStream(dst)) {
        IOUtils.copy(is, fos);
        fos.flush();
    }
}
/**
 * Reads a single object of the requested type from an LZ4-block-compressed stream.
 * The decompressing wrapper (and thus the underlying stream) is closed on exit.
 *
 * @param stream compressed source stream
 * @param clazz  expected type of the deserialized object
 * @return the deserialized object
 * @throws IOException if decompression or deserialization fails
 */
@Override
public <T extends Serializable> T readObject(
    @Nonnull final InputStream stream,
    @Nonnull final Class<T> clazz
) throws IOException {
  try (final LZ4BlockInputStream lz4 = new LZ4BlockInputStream(stream)) {
    return provider.readObject(lz4, clazz);
  }
}
/**
 * Opens a spill file and returns a streaming iterator over the entries it contains.
 * The file is read through an LZ4 block decompressor and parsed lazily by Jackson.
 *
 * @param file     spill file previously written by this grouper's spill mapper
 * @param keyClazz concrete key type used to resolve the parametric {@code Entry} type
 * @return an iterator over the deserialized entries
 * @throws RuntimeException wrapping any {@link IOException} raised while opening or parsing
 */
private MappingIterator<Entry<KeyType>> read(final File file, final Class<KeyType> keyClazz)
{
  try {
    return spillMapper.readValues(
        spillMapper.getFactory().createParser(new LZ4BlockInputStream(new FileInputStream(file))),
        spillMapper.getTypeFactory().constructParametricType(Entry.class, keyClazz)
    );
  }
  catch (IOException e) {
    // Throwables.propagate is deprecated in Guava; wrap explicitly instead.
    throw new RuntimeException(e);
  }
}
/**
 * Opens a spill file and returns a streaming iterator over the entries it contains.
 * The file is read through an LZ4 block decompressor and parsed lazily by Jackson.
 *
 * @param file     spill file previously written by this grouper's spill mapper
 * @param keyClazz concrete key type used to resolve the parametric {@code Entry} type
 * @return an iterator over the deserialized entries
 * @throws RuntimeException wrapping any {@link IOException} raised while opening or parsing
 */
private MappingIterator<Entry<KeyType>> read(final File file, final Class<KeyType> keyClazz)
{
  try {
    return spillMapper.readValues(
        spillMapper.getFactory().createParser(new LZ4BlockInputStream(new FileInputStream(file))),
        spillMapper.getTypeFactory().constructParametricType(Entry.class, keyClazz)
    );
  }
  catch (IOException e) {
    // Throwables.propagate is deprecated in Guava; wrap explicitly instead.
    throw new RuntimeException(e);
  }
}
/**
 * Parses LZ4-block-compressed file-source data using protobuf's default size limit.
 * When the payload is too large for that limit, falls back to
 * {@code decodeHugeSourceData}, which raises the limit before parsing.
 *
 * @param binaryData LZ4-block-compressed serialized {@code DbFileSources.Data}
 * @return the decoded message
 * @throws IOException if decompression or parsing fails
 */
private static DbFileSources.Data decodeRegularSourceData(byte[] binaryData) throws IOException {
  try (LZ4BlockInputStream decompressed = new LZ4BlockInputStream(new ByteArrayInputStream(binaryData))) {
    return DbFileSources.Data.parseFrom(decompressed);
  } catch (InvalidProtocolBufferException e) {
    // Protobuf signals the size-limit condition only through the exception message.
    boolean sizeLimitExceeded = SIZE_LIMIT_EXCEEDED_EXCEPTION_MESSAGE.equals(e.getMessage());
    if (!sizeLimitExceeded) {
      throw e;
    }
    return decodeHugeSourceData(binaryData);
  }
}
/**
 * Decodes file-source data that exceeds protobuf's default size limit by parsing
 * through a {@code CodedInputStream} whose limit is raised to {@code Integer.MAX_VALUE}.
 *
 * @param binaryData LZ4-block-compressed serialized {@code DbFileSources.Data}
 * @return the decoded message
 * @throws IOException if decompression or parsing fails
 */
private static DbFileSources.Data decodeHugeSourceData(byte[] binaryData) throws IOException {
  try (LZ4BlockInputStream decompressed = new LZ4BlockInputStream(new ByteArrayInputStream(binaryData))) {
    final CodedInputStream coded = CodedInputStream.newInstance(decompressed);
    coded.setSizeLimit(Integer.MAX_VALUE);
    return DbFileSources.Data.parseFrom(coded);
  }
}
/**
 * Reads a revision object from an LZ4-block-compressed, checksummed stream.
 * Decompression uses the fast (non-high-compression) LZ4 decompressor.
 */
@Override
public RevObject read(ObjectId id, InputStream in) throws IOException {
  // Argument order matches the original locals: decompressor first, then checksum.
  final LZ4BlockInputStream decompressing =
      new LZ4BlockInputStream(in, lz4factory.fastDecompressor(), newChecksum());
  return factory.read(id, decompressing);
}
/**
 * Reads a revision object from an LZ4-block-compressed, checksummed stream.
 * Decompression uses the fast (non-high-compression) LZ4 decompressor.
 */
@Override
public RevObject read(ObjectId id, InputStream in) throws IOException {
  // Argument order matches the original locals: decompressor first, then checksum.
  final LZ4BlockInputStream decompressing =
      new LZ4BlockInputStream(in, lz4factory.fastDecompressor(), newChecksum());
  return factory.read(id, decompressing);
}
/**
 * Deserializes by wrapping the input in an LZ4 block decompressor (fastest available
 * implementation) and delegating to the underlying serializer.
 */
@Override
public Object deserialize(final InputStream is) throws Exception {
  final LZ4FastDecompressor decompressor = LZ4Factory.fastestInstance().fastDecompressor();
  return getDelegate().deserialize(new LZ4BlockInputStream(is, decompressor));
}
/**
 * Asserts that the response is a 200 with lz4 content encoding, then returns the
 * decompressed body decoded as UTF-8.
 */
@Override
public String handle(HttpClientResponse response) throws IOException {
  Assert.assertEquals(HttpStatus.OK_200, response.getStatusCode());
  Assert.assertEquals("lz4", response.getHeader(HttpHeaders.CONTENT_ENCODING));
  final InputStream decompressed = new LZ4BlockInputStream(response.getResponseBodyAsStream());
  return IOUtils.toString(decompressed, Charsets.UTF_8);
} }