/**
 * Serializes {@code obj} through the shared serializer into the reusable
 * output buffer and returns the serialized bytes as a Base64 string
 * (decoded as UTF-8 text).
 *
 * @param obj the object to serialize
 * @return Base64 text form of the serialized object
 * @throws IOException if serialization fails
 */
@Override public String toString(T obj) throws IOException {
  outBuf.reset();
  serializer.serialize(obj);
  // Snapshot only the valid portion of the buffer's backing array;
  // getData() may be larger than getLength().
  final int len = outBuf.getLength();
  final byte[] raw = new byte[len];
  System.arraycopy(outBuf.getData(), 0, raw, 0, len);
  return new String(Base64.encodeBase64(raw), StandardCharsets.UTF_8);
}
/**
 * Set to contain the contents of a string.
 * Over-long inputs are truncated (with a warning) rather than rejected:
 * 0xffff/3 chars is the largest count guaranteed to fit in 0xffff bytes,
 * since each char encodes to at most 3 bytes here.
 */
public void set(String string) {
  if (string.length() > 0xffff/3) {              // maybe too long
    LOG.warn("truncating long string: " + string.length()
             + " chars, starting with " + string.substring(0, 20));
    string = string.substring(0, 0xffff/3);
  }
  length = utf8Length(string);                   // compute length
  if (length > 0xffff)                           // double-check length
    throw new RuntimeException("string too long!");
  if (bytes == null || length > bytes.length)    // grow buffer
    bytes = new byte[length];
  try {                                          // avoid sync'd allocations
    // Encode via a thread-local scratch buffer, then copy the encoded
    // bytes into this object's own array.
    DataOutputBuffer obuf = OBUF_FACTORY.get();
    obuf.reset();
    writeChars(obuf, string, 0, string.length());
    System.arraycopy(obuf.getData(), 0, bytes, 0, length);
  } catch (IOException e) {
    // writeChars targets an in-memory buffer, so IOException is unexpected.
    throw new RuntimeException(e);
  }
}
/**
 * Copies {@code src} into {@code dst} by serializing the source into a
 * thread-local buffer and replaying those bytes through
 * {@code dst.readFields}.
 *
 * @param dst destination object, overwritten by the copy
 * @param src source object to copy from
 * @throws IOException if serialization or deserialization fails
 */
@Deprecated
public static void cloneWritableInto(Writable dst, Writable src) throws IOException {
  final CopyInCopyOutBuffer scratch = CLONE_BUFFERS.get();
  scratch.outBuffer.reset();
  // Write out the source, hand the bytes to the input side, read back in.
  src.write(scratch.outBuffer);
  scratch.moveData();
  dst.readFields(scratch.inBuffer);
}
/** Convert a string to a UTF-8 encoded byte array. * @see String#getBytes(String) */ public static byte[] getBytes(String string) { byte[] result = new byte[utf8Length(string)]; try { // avoid sync'd allocations DataOutputBuffer obuf = OBUF_FACTORY.get(); obuf.reset(); writeChars(obuf, string, 0, string.length()); System.arraycopy(obuf.getData(), 0, result, 0, obuf.getLength()); } catch (IOException e) { throw new RuntimeException(e); } return result; }
/** Compress and flush contents to dfs */
@Override
public synchronized void sync() throws IOException {
  // Nothing to do unless records have been buffered since the last sync.
  if (noBufferedRecords > 0) {
    super.sync();

    // No. of records
    WritableUtils.writeVInt(out, noBufferedRecords);

    // Write 'keys' and lengths
    writeBuffer(keyLenBuffer);
    writeBuffer(keyBuffer);

    // Write 'values' and lengths
    writeBuffer(valLenBuffer);
    writeBuffer(valBuffer);

    // Flush the file-stream
    out.flush();

    // Reset internal states so the next block starts empty.
    keyLenBuffer.reset();
    keyBuffer.reset();
    valLenBuffer.reset();
    valBuffer.reset();
    noBufferedRecords = 0;
  }
}
/**
 * Checks if this MapFile has the indicated key. The membership test is
 * performed using a Bloom filter, so the result has always non-zero
 * probability of false positives.
 * @param key key to check
 * @return false iff key doesn't exist, true if key probably exists.
 * @throws IOException if serializing the key fails
 */
public boolean probablyHasKey(WritableComparable key) throws IOException {
  if (bloomFilter != null) {
    // Serialize the key and wrap the bytes as a filter probe.
    buf.reset();
    key.write(buf);
    bloomKey.set(byteArrayForBloomKey(buf), 1.0);
    return bloomFilter.membershipTest(bloomKey);
  }
  // Without a filter we cannot rule anything out.
  return true;
}
/** * Emits a (reduce) group row, made from the key (copied in at the beginning of the group) and * the row aggregation buffers values * @param agg * @param buffer * @throws HiveException */ private void writeGroupRow(VectorAggregationBufferRow agg, DataOutputBuffer buffer) throws HiveException { int colNum = outputKeyLength; // Start after group keys. final int batchIndex = outputBatch.size; for (int i = 0; i < aggregators.length; ++i) { aggregators[i].assignRowColumn(outputBatch, batchIndex, colNum++, agg.getAggregationBuffer(i)); } ++outputBatch.size; if (outputBatch.size == VectorizedRowBatch.DEFAULT_SIZE) { flushOutput(); buffer.reset(); } }
/** Workhorse to check and write out compressed data/lengths */
private synchronized void writeBuffer(DataOutputBuffer uncompressedDataBuffer)
    throws IOException {
  // Reset compressor state and the compressed-output scratch buffer.
  deflateFilter.resetState();
  buffer.reset();
  // Push the uncompressed bytes through the compression stream;
  // finish() drains any remaining compressed data into 'buffer'.
  deflateOut.write(uncompressedDataBuffer.getData(), 0,
      uncompressedDataBuffer.getLength());
  deflateOut.flush();
  deflateFilter.finish();
  // Emit <compressed-length, compressed-bytes> to the underlying stream.
  WritableUtils.writeVInt(out, buffer.getLength());
  out.write(buffer.getData(), 0, buffer.getLength());
}
/**
 * Append a key/value pair. The key is serialized uncompressed into the
 * record buffer; the value is then serialized through the compression
 * stream into the same buffer, and finally the framed record
 * (total length, key length, bytes) is written out.
 */
@Override
@SuppressWarnings("unchecked")
public synchronized void append(Object key, Object val) throws IOException {
  if (key.getClass() != keyClass)
    throw new IOException("wrong key class: "+key.getClass().getName()
                          +" is not "+keyClass);
  if (val.getClass() != valClass)
    throw new IOException("wrong value class: "+val.getClass().getName()
                          +" is not "+valClass);

  buffer.reset();

  // Append the 'key'
  keySerializer.serialize(key);
  int keyLength = buffer.getLength();
  if (keyLength < 0)
    throw new IOException("negative length keys not allowed: " + key);

  // Compress 'value' and append it
  deflateFilter.resetState();
  compressedValSerializer.serialize(val);
  deflateOut.flush();
  deflateFilter.finish();

  // Write the record out
  checkAndWriteSync();                                 // sync
  out.writeInt(buffer.getLength());                    // total record length
  out.writeInt(keyLength);                             // key portion length
  out.write(buffer.getData(), 0, buffer.getLength());  // data
}
/**
 * Appends the pair via the parent writer, then records the key in the
 * Bloom filter so later membership probes can find it.
 */
@Override
public synchronized void append(WritableComparable key, Writable val)
    throws IOException {
  super.append(key, val);
  // Re-serialize the key into the scratch buffer to feed the filter.
  buf.reset();
  key.write(buf);
  bloomKey.set(byteArrayForBloomKey(buf), 1.0);
  bloomFilter.add(bloomKey);
}
private void checkKey(WritableComparable key) throws IOException { // check that keys are well-ordered if (size != 0 && comparator.compare(lastKey, key) > 0) throw new IOException("key out of order: "+key+" after "+lastKey); // update lastKey with a copy of key by writing and reading outBuf.reset(); key.write(outBuf); // write new key inBuf.reset(outBuf.getData(), outBuf.getLength()); lastKey.readFields(inBuf); // read into lastKey }
+" is not "+valClass); buffer.reset();
/**
 * Returns the next byte of the textual key\tvalue\n representation of the
 * underlying record stream, refilling the line buffer from the reader
 * whenever the current line is exhausted.
 *
 * @return the next byte, or -1 when the reader has no more records
 * @throws IOException on read failure
 */
@Override
public int read() throws IOException {
  int b;
  // Keep draining the current line if one is in progress.
  if (inbuf != null && (b = inbuf.read()) != -1) {
    return b;
  }
  // Line exhausted (or first call): pull the next key/value pair.
  if (!r.next(key, val)) {
    return -1;
  }
  byte[] encoded = key.toString().getBytes(StandardCharsets.UTF_8);
  outbuf.write(encoded, 0, encoded.length);
  outbuf.write('\t');
  encoded = val.toString().getBytes(StandardCharsets.UTF_8);
  outbuf.write(encoded, 0, encoded.length);
  outbuf.write('\n');
  // Expose the freshly built line through the input buffer and recycle
  // the output buffer for the next record.
  inbuf.reset(outbuf.getData(), outbuf.getLength());
  outbuf.reset();
  return inbuf.read();
}
/** * Emits a (reduce) group row, made from the key (copied in at the beginning of the group) and * the row aggregation buffers values * @param agg * @param buffer * @throws HiveException */ private void writeGroupRow(VectorAggregationBufferRow agg, DataOutputBuffer buffer) throws HiveException { int fi = outputKeyLength; // Start after group keys. for (int i = 0; i < aggregators.length; ++i) { vectorAssignRow.assignRowColumn(outputBatch, outputBatch.size, fi++, aggregators[i].evaluateOutput(agg.getAggregationBuffer(i))); } ++outputBatch.size; if (outputBatch.size == VectorizedRowBatch.DEFAULT_SIZE) { flushOutput(); buffer.reset(); } }
/**
 * Make a copy of the writable object using serialization to a buffer.
 *
 * @param conf configuration used to look up the serialization factory
 * @param src  the object to copy from
 * @param dst  the object to copy into, which is destroyed
 * @return dst param (the copy)
 * @throws IOException if serialization round-trip fails
 */
@SuppressWarnings("unchecked")
public static <T> T copy(Configuration conf, T src, T dst) throws IOException {
  final CopyInCopyOutBuffer scratch = CLONE_BUFFERS.get();
  scratch.outBuffer.reset();
  final SerializationFactory serializationFactory = getFactory(conf);
  final Class<T> type = (Class<T>) src.getClass();
  // Serialize the source object into the thread-local buffer...
  final Serializer<T> ser = serializationFactory.getSerializer(type);
  ser.open(scratch.outBuffer);
  ser.serialize(src);
  scratch.moveData();
  // ...then deserialize those bytes on top of dst.
  final Deserializer<T> deser = serializationFactory.getDeserializer(type);
  deser.open(scratch.inBuffer);
  return deser.deserialize(dst);
}
/**
 * Test helper: writes {@code authMethod} into {@code out} and asserts that
 * the first serialized byte equals the method's code. Resets the buffer
 * afterwards so it can be reused by the next assertion.
 */
private void assertAuthMethodWrite(DataOutputBuffer out, AuthMethod authMethod)
    throws IOException {
  authMethod.write(out);
  assertEquals(authMethod.code, out.getData()[0]);
  out.reset();
}
/**
 * Fills {@code vectorizedRowBatch} with rows pulled from the wrapped
 * reader, adding partition columns first when present.
 *
 * @return true if at least one row was added; false at end of input
 * @throws IOException wrapping any failure while adding rows
 */
@Override
public boolean next(NullWritable nullWritable, VectorizedRowBatch vectorizedRowBatch
                    ) throws IOException {
  vectorizedRowBatch.reset();
  buffer.reset();
  // No more rows in the wrapped reader: signal end of input.
  if (!innerReader.next(key, value)) {
    return false;
  }
  if (partitionValues != null) {
    rbCtx.addPartitionColsToBatch(vectorizedRowBatch, partitionValues);
  }
  try {
    // Copy the first row, then keep pulling rows until the batch is full
    // (bounded by selected.length) or the reader runs out.
    VectorizedBatchUtil.acidAddRowToBatch(value,
        (StructObjectInspector) objectInspector,
        vectorizedRowBatch.size++, vectorizedRowBatch, rbCtx, buffer);
    while (vectorizedRowBatch.size < vectorizedRowBatch.selected.length &&
        innerReader.next(key, value)) {
      VectorizedBatchUtil.acidAddRowToBatch(value,
          (StructObjectInspector) objectInspector,
          vectorizedRowBatch.size++, vectorizedRowBatch, rbCtx, buffer);
    }
  } catch (Exception e) {
    throw new IOException("error iterating", e);
  }
  return true;
}
/** Fills up the rawKey object with the key returned by the Reader
 * @return true if there is a key returned; false, otherwise
 * @throws IOException
 */
public boolean nextRawKey() throws IOException {
  // Lazily open the segment reader on first use, validating key/value
  // classes against what this merger expects.
  if (in == null) {
    int bufferSize = getBufferSize(conf);
    Reader reader = new Reader(conf,
                               Reader.file(segmentPathName),
                               Reader.bufferSize(bufferSize),
                               Reader.start(segmentOffset),
                               Reader.length(segmentLength));

    //sometimes we ignore syncs especially for temp merge files
    if (ignoreSync) reader.ignoreSync();

    if (reader.getKeyClass() != keyClass)
      throw new IOException("wrong key class: " + reader.getKeyClass() +
                            " is not " + keyClass);
    if (reader.getValueClass() != valClass)
      throw new IOException("wrong value class: "+reader.getValueClass()+
                            " is not " + valClass);
    this.in = reader;
    rawKey = new DataOutputBuffer();
  }
  rawKey.reset();
  // nextRawKey returns the key length; a negative value means end of file.
  int keyLength = in.nextRawKey(rawKey);
  return (keyLength >= 0);
}
/**
 * Serializes this index to {@code out}. An empty index (no first key) is
 * encoded as a single zero vint; otherwise the first key and each index
 * entry are written as vint-length-prefixed chunks.
 */
public void write(DataOutput out) throws IOException {
  if (firstKey == null) {
    Utils.writeVInt(out, 0);
    return;
  }

  DataOutputBuffer dob = new DataOutputBuffer();
  // First chunk: the first key's length followed by its bytes.
  // NOTE(review): dob.write(firstKey.buffer()) writes the whole backing
  // array — assumes buffer().length matches size(); confirm upstream.
  Utils.writeVInt(dob, firstKey.size());
  dob.write(firstKey.buffer());
  Utils.writeVInt(out, dob.size());
  out.write(dob.getData(), 0, dob.getLength());

  // Each index entry is serialized into the reused scratch buffer and
  // emitted with its own vint length prefix.
  for (TFileIndexEntry entry : index) {
    dob.reset();
    entry.write(dob);
    Utils.writeVInt(out, dob.getLength());
    out.write(dob.getData(), 0, dob.getLength());
  }
}
}