/**
 * Creates a builder backed by the given channel; the builder's path is
 * initialized from the channel's file path.
 */
public Builder(ChannelProxy channel)
{
    this.channel = channel;
    this.path = channel.filePath();
}
/** The file path doubles as the human-readable identity of this object. */
@Override
public String toString()
{
    return filePath();
}
}
/**
 * @return Path to the file this factory is referencing
 */
public String path() { return channel.filePath(); }
/** @return the underlying channel's file path, used as this instance's name */
public String name()
{
    return channel.filePath();
}
/** Constructs a builder over {@code channel}, caching the channel's file path. */
public Builder(ChannelProxy channel)
{
    this.channel = channel;
    this.path = channel.filePath();
}
/** Delegates to the channel: the name reported here is its file path. */
public String name()
{
    return channel.filePath();
}
/** Describes this reader as {@code SimpleName(path - data length N)} for logs. */
@Override
public String toString()
{
    return String.format("%s(%s - data length %d)",
                         getClass().getSimpleName(),
                         channel.filePath(),
                         fileLength());
}
}
/** Debug representation: class name, file path, chunk (buffer) size, and data length. */
@Override
public String toString()
{
    return String.format("%s(%s - chunk length %d, data length %d)",
                         getClass().getSimpleName(),
                         channel.filePath(),
                         bufferSize,
                         fileLength());
}
}
/**
 * Cache key identifying a chunk: the reader it belongs to, the chunk's
 * position, and (derived) the reader's file path.
 */
public Key(ChunkReader file, long position)
{
    super();
    this.file = file;
    this.position = position;
    this.path = file.channel().filePath();
}
/** Formats this reader for diagnostics: simple class name, path, and data length. */
@Override
public String toString()
{
    String kind = getClass().getSimpleName();
    return String.format("%s(%s - data length %d)", kind, channel.filePath(), fileLength());
}
}
/** Formats this reader for diagnostics, including its chunk (buffer) size. */
@Override
public String toString()
{
    String kind = getClass().getSimpleName();
    return String.format("%s(%s - chunk length %d, data length %d)",
                         kind, channel.filePath(), bufferSize, fileLength());
}
}
/**
 * Key for a chunk at {@code position} within {@code file}; the path is
 * captured eagerly from the reader's channel.
 */
public Key(ChunkReader file, long position)
{
    // implicit super() — no-op parent constructor
    this.file = file;
    this.position = position;
    this.path = file.channel().filePath();
}
/** Identifies one chunk: owning reader, chunk position, and the reader's file path. */
public Key(ChunkReader file, long position)
{
    super();
    this.file = file;
    this.position = position;
    this.path = file.channel().filePath();
}
/**
 * Diagnostic form: subclass name, file path, compressor class, chunk length,
 * and uncompressed data length.
 */
@Override
public String toString()
{
    return String.format("CompressedChunkReader.%s(%s - %s, chunk length %d, data length %d)",
                         getClass().getSimpleName(),
                         channel.filePath(),
                         metadata.compressor().getClass().getSimpleName(),
                         metadata.chunkLength(),
                         metadata.dataLength);
}
/** Log-friendly description of this compressed reader and its compression metadata. */
@Override
public String toString()
{
    String compressor = metadata.compressor().getClass().getSimpleName();
    return String.format("CompressedChunkReader.%s(%s - %s, chunk length %d, data length %d)",
                         getClass().getSimpleName(), channel.filePath(), compressor,
                         metadata.chunkLength(), metadata.dataLength);
}
public ChannelProxy sharedCopy() { try { FSDataInputStream inputStream = HadoopFileUtils.buildInputStream(this.fs, this.filePath, this.bufferSize); Cleanup cleanup = new Cleanup(this.filePath(), inputStream); return new ChannelProxy(cleanup, this.fs, inputStream, this.filePath, this.bufferSize); } catch (IOException e) { e.printStackTrace(); //TODO: using logging throw new RuntimeException((e.getCause())); } }
/** Closes the shared state; any failure is surfaced as an FSReadError tagged with the file path. */
public void tidy()
{
    try
    {
        Throwables.maybeFail(state.close(null));
    }
    catch (Exception e)
    {
        throw new FSReadError(e, state.channel.filePath());
    }
}
}
/** Releases the underlying state, rethrowing close failures as FSReadError. */
public void tidy()
{
    try
    {
        Throwables.maybeFail(state.close(null));
    }
    catch (Exception cause)
    {
        // attach the file path so the error identifies which file failed to close
        throw new FSReadError(cause, state.channel.filePath());
    }
}
}
/** Cleanup hook: close the state and convert any close-time exception to FSReadError. */
public void tidy()
{
    try
    {
        Throwables.maybeFail(state.close(null));
    }
    catch (Exception t)
    {
        throw new FSReadError(t, state.channel.filePath());
    }
}
}