/**
 * Adds a row, first recording a new spill-block boundary when the buffered
 * rows are about to be spilled to disk.
 *
 * @param t the row to add
 * @throws HiveException if the spill stream's current offset cannot be read
 */
@Override
public void addRow(Row t) throws HiveException {
  if (willSpill()) {
    setupWriter();
    PTFRecordWriter recordWriter = (PTFRecordWriter) getRecordWriter();
    BlockInfo block = new BlockInfo();
    try {
      // Mark where this spill block begins in the output stream.
      block.startOffset = recordWriter.outStream.getLength();
      blockInfos.add(block);
    } catch (IOException e) {
      // Discard buffered rows before propagating; partial state is unusable.
      clearRows();
      LOG.error(e.toString(), e);
      throw new HiveException(e);
    }
  }
  super.addRow(t);
}
/**
 * Appends a row; when the next add would trigger a spill, the byte offset at
 * which the new on-disk block starts is recorded first.
 *
 * @param t row to append
 * @throws HiveException wrapping any I/O failure while reading the offset
 */
@Override
public void addRow(Row t) throws HiveException {
  if (willSpill()) {
    setupWriter();
    PTFRecordWriter writer = (PTFRecordWriter) getRecordWriter();
    BlockInfo info = new BlockInfo();
    try {
      info.startOffset = writer.outStream.getLength();
      blockInfos.add(info);
    } catch (IOException e) {
      clearRows(); // buffered rows are inconsistent now; drop them
      LOG.error(e.toString(), e);
      throw new HiveException(e);
    }
  }
  super.addRow(t);
}
/**
 * Returns the current byte position of the underlying writer.
 *
 * @return number of bytes written so far
 * @throws IOException if the underlying writer cannot report its length
 */
@Override
public long getPosition() throws IOException {
  final long bytesWritten = internalWriter.getLength();
  return bytesWritten;
}
/**
 * Serializes {@code model} into the reusable key/value buffers, appends the
 * pair, and credits the bytes written against the counter.
 *
 * @param model the record to write
 * @throws IOException if the append fails
 */
@Override
public void write(T model) throws IOException {
  copyFromModel(model, keyBuffer, valueBuffer);
  writer.append(keyBuffer, valueBuffer);
  final long position = writer.getLength();
  // Account only the delta produced by this append.
  counter.add(position - lastPosition);
  lastPosition = position;
}
/**
 * Commit hook: rotates the output file once the rotation policy's threshold
 * is reached for the current file length; otherwise flushes durably.
 *
 * @param txId transaction being committed (unused here)
 * @throws IOException on rotation or sync failure
 */
@Override
public void doCommit(Long txId) throws IOException {
  final boolean shouldRotate = this.rotationPolicy.mark(this.writer.getLength());
  if (!shouldRotate) {
    this.writer.hsync();
    return;
  }
  rotateOutputFile();
  this.rotationPolicy.reset();
}
/**
 * Returns the writer's current length in bytes, or {@code -1} when no writer
 * is open.
 *
 * @param writer the writer to query; may be {@code null}
 * @throws IOException if the length cannot be read
 */
protected long getPosition(Writer writer) throws IOException {
  return (writer == null) ? -1 : writer.getLength();
}
@Override public long getLength() throws IOException { try { return this.writer.getLength(); } catch (NullPointerException npe) { // Concurrent close... throw new IOException(npe); } }
/**
 * Writes one tuple as a key/value pair and records the writer's resulting
 * byte offset.
 *
 * @param tuple the tuple to serialize and append
 * @throws IOException if the append fails
 */
@Override
protected void doWrite(Tuple tuple) throws IOException {
  // Append the formatted pair, then snapshot how far the output has grown.
  writer.append(format.key(tuple), format.value(tuple));
  offset = writer.getLength();
}
/** Append a key/value pair to the map. The key must be greater or equal * to the previous key added to the map. */ public synchronized void append(WritableComparable key, Writable val) throws IOException { checkKey(key); if (size % indexInterval == 0) { // add an index entry position.set(data.getLength()); // point to current eof index.append(key, position); } data.append(key, val); // append key/value to data size++; }
/** Append a key/value pair to the map. The key must be greater or equal * to the previous key added to the map. */ public synchronized void append(WritableComparable key, Writable val) throws IOException { checkKey(key); if (size % indexInterval == 0) { // add an index entry position.set(data.getLength()); // point to current eof index.append(key, position); } data.append(key, val); // append key/value to data size++; }
key.set(100 - i);
value.set(DATA[i % DATA.length]);
// Bug fix: the original used "% s" — the space flag is illegal with the %s
// conversion and throws FormatFlagsConversionMismatchException at runtime.
System.out.printf("[%s]\t%s\t%s\n", writer.getLength(), key, value);
writer.append(key, value);
} } finally
public static void copyTo64MB(String src, String dst) throws IOException { Configuration hconf = new Configuration(); Path srcPath = new Path(src); Path dstPath = new Path(dst); FileSystem fs = FileSystem.get(hconf); long srcSize = fs.getFileStatus(srcPath).getLen(); int copyTimes = (int) (67108864 / srcSize); // 64 MB System.out.println("Copy " + copyTimes + " times"); Reader reader = new Reader(hconf, SequenceFile.Reader.file(srcPath)); Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), hconf); Text value = new Text(); Writer writer = SequenceFile.createWriter(hconf, Writer.file(dstPath), Writer.keyClass(key.getClass()), Writer.valueClass(Text.class), Writer.compression(CompressionType.BLOCK, getLZOCodec(hconf))); int count = 0; while (reader.next(key, value)) { for (int i = 0; i < copyTimes; i++) { writer.append(key, value); count++; } } System.out.println("Len: " + writer.getLength()); System.out.println("Rows: " + count); reader.close(); writer.close(); }
/** Append a key/value pair to the map. The key must be greater or equal * to the previous key added to the map. */ public synchronized void append(WritableComparable key, Writable val) throws IOException { checkKey(key); long pos = data.getLength(); // Only write an index if we've changed positions. In a block compressed // file, this means we write an entry at the start of each block if (size >= lastIndexKeyCount + indexInterval && pos > lastIndexPos) { position.set(pos); // point to current eof index.append(key, position); lastIndexPos = pos; lastIndexKeyCount = size; } data.append(key, val); // append key/value to data size++; }
/**
 * Returns the number of bytes written by the wrapped writer.
 *
 * @throws IOException if the underlying writer fails to report its length
 */
@Override
public long getLength() throws IOException {
  final long length = this.mWriter.getLength();
  return length;
}
/**
 * Delegates to the wrapped writer's current length in bytes.
 *
 * @throws IOException on failure to query the wrapped writer
 */
@Override
public long getLength() throws IOException {
  return mWriter.getLength();
}
/**
 * Current write offset, i.e. the number of bytes the writer has emitted.
 *
 * @throws IOException if the writer cannot report its length
 */
public long getOffset() throws IOException {
  final long bytes = writer.getLength();
  return bytes;
}
/**
 * Returns the writer's current position in bytes.
 *
 * @throws IOException if the underlying writer fails
 */
@Override
public long getCurrentOffset() throws IOException {
  return writer.getLength();
}
/**
 * Emits {@code tuple} as one key/value record and updates the stored offset
 * with the writer's new length.
 *
 * @param tuple record to write
 * @throws IOException if the underlying append fails
 */
@Override
protected void doWrite(Tuple tuple) throws IOException {
  writer.append(format.key(tuple), format.value(tuple));
  // Remember how far the output file has advanced after this append.
  offset = writer.getLength();
}
/**
 * Per-transaction commit: rotates the output file when the rotation policy
 * judges the current length big enough; otherwise hsyncs so the committed
 * data is durable.
 *
 * @param txId the transaction id being committed (not used here)
 * @throws IOException on rotation or sync failure
 */
@Override
public void doCommit(Long txId) throws IOException {
  final long bytesWritten = this.writer.getLength();
  if (this.rotationPolicy.mark(bytesWritten)) {
    rotateOutputFile();
    this.rotationPolicy.reset();
  } else {
    // Below the rotation threshold: just flush durably.
    this.writer.hsync();
  }
}