@Override public void write(Writable r) throws IOException { if (r instanceof Text) { Text tr = (Text) r; outStream.write(tr.getBytes(), 0, tr.getLength()); } else { // DynamicSerDe always writes out BytesWritable BytesWritable bw = (BytesWritable) r; outStream.write(bw.get(), 0, bw.getSize()); } }
/**
 * Reads the next record into the shared byte buffer.
 *
 * <p>On a successful read the buffer's logical size is updated to the number
 * of bytes read; on end-of-stream the buffer is left untouched.
 *
 * @param row unused by this reader; the data lands in the shared buffer
 * @return the number of bytes read, or a negative value at end-of-stream
 * @throws IOException if the underlying read fails
 */
public int next(Writable row) throws IOException {
  final int nRead = in.read(bytes.get(), 0, maxRecordLength);
  if (nRead < 0) {
    // End of stream: report it without resizing the buffer.
    return nRead;
  }
  bytes.setSize(nRead);
  return nRead;
}
// NOTE(review): garbled fragment — the dangling "+ Utilities.formatBinaryString(...)"
// line is the middle of a string concatenation (an error/log message) whose start
// lies outside this view; this chunk is not a complete statement sequence.
// Visible logic: read the byte at offset `size` of the key as the tag (presumably
// the tag byte appended after the key proper — confirm against the full method),
// rebuild/shrink keyWritable to `size` bytes, copy those bytes into groupKey,
// trace the start of a new group, and hand the deserialized key object to the
// reducer. Do not edit further without the surrounding method in view.
tag = keyWritable.get()[size]; keyWritable = new BytesWritable(keyWritable.getBytes(), size); keyWritable.setSize(size); + Utilities.formatBinaryString(keyWritable.get(), 0, keyWritable.getSize()) + " with properties " + keyTableDesc.getProperties()); groupKey.set(keyWritable.get(), 0, keyWritable.getSize()); LOG.trace("Start Group"); reducer.setGroupKeyObject(keyObject);
// NOTE(review): garbled fragment — as written, the statements after the `throw`
// would be unreachable; in the original source the throw almost certainly sits
// inside a catch block (note the `, e)` cause argument) with the later statements
// outside it. Visible logic: strip the tag byte at offset `size`, shrink the key,
// throw a HiveException (wrapping cause `e`) whose message hex/format-dumps the
// undeserializable key bytes plus the key table properties, and — in the
// non-error path — publish the key bytes into groupKey and notify the reducer
// of the new group key object. Do not edit without the full method in view.
tag = keyWritable.get()[size]; keyWritable = new BytesWritable(keyWritable.getBytes(), size); keyWritable.setSize(size); throw new HiveException( "Hive Runtime Error: Unable to deserialize reduce input key from " + Utilities.formatBinaryString(keyWritable.get(), 0, keyWritable.getSize()) + " with properties " + keyTableDesc.getProperties(), e); groupKey.set(keyWritable.get(), 0, keyWritable.getSize()); LOG.trace("Start Group"); reducer.setGroupKeyObject(keyObject);
@Override public void write(Writable r) throws IOException { if (r instanceof Text) { Text tr = (Text) r; outStream.write(tr.getBytes(), 0, tr.getLength()); } else { // DynamicSerDe always writes out BytesWritable BytesWritable bw = (BytesWritable) r; outStream.write(bw.get(), 0, bw.getSize()); } }
/**
 * Reads the next record into the shared byte buffer and resizes the buffer to
 * match the bytes actually read.
 *
 * @param row unused by this reader; data is delivered through the shared buffer
 * @return the count of bytes read, or a negative value at end-of-stream
 * @throws IOException if the underlying read fails
 */
public int next(Writable row) throws IOException {
  final int count = in.read(bytes.get(), 0, maxRecordLength);
  // A negative count signals end-of-stream; only resize after a real read.
  if (count >= 0) {
    bytes.setSize(count);
  }
  return count;
}
@Override public void write(Writable r) throws IOException { if (r instanceof Text) { Text tr = (Text) r; outStream.write(tr.getBytes(), 0, tr.getLength()); outStream.write(finalRowSeparator); } else { // DynamicSerDe always writes out BytesWritable BytesWritable bw = (BytesWritable) r; outStream.write(bw.get(), 0, bw.getSize()); outStream.write(finalRowSeparator); } }
/**
 * Deserializes the writable's payload and optionally narrows and transforms it.
 *
 * <p>The raw bytes are turned into an object by {@code serializer}; if
 * {@code selection} is non-null the object is treated as a map and the value
 * under that key is extracted; if {@code trans} is non-null the (possibly
 * narrowed) object is passed through the transformation.
 *
 * @param writable  the serialized payload (read via the deprecated
 *                  {@code get()} accessor, kept for old-Hadoop compatibility)
 * @param serializer deserializer for the payload bytes
 * @param selection  optional map key to extract from the deserialized object,
 *                   or null to return the whole object
 * @param trans      optional post-processing transformation, or null
 * @return the deserialized, optionally narrowed and transformed object
 */
private Object makeResult(BytesWritable writable, JsonTypeSerializer serializer,
    String selection, StoreBuilderTransformation trans) {
  Object obj = serializer.toObject(writable.get());
  if (selection != null) {
    // A selection implies the payload deserialized to a map; use a wildcard
    // view instead of the raw Map type so the compiler checks what it can.
    obj = ((Map<?, ?>) obj).get(selection);
  }
  if (trans != null) {
    obj = trans.transform(obj);
  }
  return obj;
}
@Override public void write(Writable r) throws IOException { if (r instanceof Text) { Text tr = (Text) r; outStream.write(tr.getBytes(), 0, tr.getLength()); outStream.write(finalRowSeparator); } else { // DynamicSerDe always writes out BytesWritable BytesWritable bw = (BytesWritable) r; outStream.write(bw.get(), 0, bw.getSize()); outStream.write(finalRowSeparator); } }
// Generates a random key: chooses a length of at least MIN_KEY_LEN from
// keyLenRNG, fills the key's backing buffer with random bytes, then overwrites
// the first four bytes with a big-endian int n drawn from [min, max) — or
// exactly min when max == min, avoiding nextInt(0).
// NOTE(review): random.nextBytes(key.get()) fills the entire backing array,
// not just the first `size` bytes — presumably fine since setSize() ran first,
// but confirm the buffer capacity matches the requested size.
// The trailing extra '}' closes an enclosing scope whose header is outside
// this chunk; the code is kept byte-identical for that reason.
public void next(BytesWritable key) { key.setSize(Math.max(MIN_KEY_LEN, keyLenRNG.nextInt())); random.nextBytes(key.get()); int rnd = 0; if (max != min) { rnd = random.nextInt(max - min); } int n = rnd + min; byte[] b = key.get(); b[0] = (byte) (n >> 24); b[1] = (byte) (n >> 16); b[2] = (byte) (n >> 8); b[3] = (byte) n; } }
@Override public void write(Writable r) throws IOException { if (keyIsText) { Text text = (Text) r; keyWritable.set(text.getBytes(), 0, text.getLength()); } else { BytesWritable bw = (BytesWritable) r; // Once we drop support for old Hadoop versions, change these // to getBytes() and getLength() to fix the deprecation warnings. // Not worth a shim. keyWritable.set(bw.get(), 0, bw.getSize()); } keyWritable.setHashCode(r.hashCode()); outStream.append(keyWritable, NULL_WRITABLE); }
/**
 * Fills the value with random dictionary words up to a randomly chosen length.
 *
 * <p>Words are copied back-to-back into the value's buffer; the final word is
 * truncated if it would overrun the chosen length.
 *
 * @param o the value writable to fill; its size is set before filling
 */
private void fillValue(BytesWritable o) {
  final int target = valLenRNG.nextInt();
  o.setSize(target);
  for (int filled = 0; filled < target; ) {
    final byte[] word = dict[random.nextInt(dict.length)];
    final int chunk = Math.min(word.length, target - filled);
    System.arraycopy(word, 0, o.get(), filled, chunk);
    filled += chunk;
  }
}
@Override public void write(Writable r) throws IOException { if (keyIsText) { Text text = (Text) r; keyWritable.set(text.getBytes(), 0, text.getLength()); } else { BytesWritable bw = (BytesWritable) r; // Once we drop support for old Hadoop versions, change these // to getBytes() and getLength() to fix the deprecation warnings. // Not worth a shim. keyWritable.set(bw.get(), 0, bw.getSize()); } keyWritable.setHashCode(r.hashCode()); outStream.append(keyWritable, NULL_WRITABLE); }
// Fills the key with random dictionary words after a MIN_KEY_LEN-byte prefix.
// Length comes from keyLenRNG, clamped up to MIN_KEY_LEN. Words are copied
// back-to-back starting at offset MIN_KEY_LEN, truncating the last word to fit.
// When generating sorted output, the new key's suffix (bytes MIN_KEY_LEN..len)
// is compared against the previous key's suffix; if the new suffix would sort
// before it, incrementPrefix() bumps the shared prefix so overall key order is
// preserved. The current prefix is then stamped into the first MIN_KEY_LEN
// bytes and the finished key is remembered in lastKey for the next comparison.
// NOTE(review): up to `len` bytes are written into o.get() right after
// setSize(len) — assumes setSize grows the backing buffer to capacity; confirm
// against the BytesWritable version in use.
private void fillKey(BytesWritable o) { int len = keyLenRNG.nextInt(); if (len < MIN_KEY_LEN) len = MIN_KEY_LEN; o.setSize(len); int n = MIN_KEY_LEN; while (n < len) { byte[] word = dict[random.nextInt(dict.length)]; int l = Math.min(word.length, len - n); System.arraycopy(word, 0, o.get(), n, l); n += l; } if (sorted && WritableComparator.compareBytes(lastKey.get(), MIN_KEY_LEN, lastKey .getSize() - MIN_KEY_LEN, o.get(), MIN_KEY_LEN, o.getSize() - MIN_KEY_LEN) > 0) { incrementPrefix(); } System.arraycopy(prefix, 0, o.get(), 0, MIN_KEY_LEN); lastKey.set(o); }
// NOTE(review): fragment — continuation of a string concatenation building an
// error/log message that formats the reduce value bytes (via
// Utilities.formatBinaryString) together with the value table properties for
// the given tag; the start of the expression and its enclosing statement are
// outside this view. Kept byte-identical.
+ tag + ") from " + Utilities.formatBinaryString(valueWritable.get(), 0, valueWritable.getSize()) + " with properties " + valueTableDesc[tag].getProperties());
// NOTE(review): garbled fragment fusing at least two regions of the original
// method — a key-deserialization error path (HiveException wrapping cause `e`,
// message formatting the bad key bytes and key table properties), the
// group-key publication path (groupKey.set + guarded trace logging), and a
// dangling "+ tag + …" continuation from a separate value-deserialization
// error message. The `if (LOG.isTraceEnabled())` block is cut off mid-body.
// Kept byte-identical; do not edit without the surrounding method in view.
tag = keyWritable.get()[size]; keyWritable.setSize(size); throw new HiveException( "Hive Runtime Error: Unable to deserialize reduce input key from " + Utilities.formatBinaryString(keyWritable.get(), 0, keyWritable.getSize()) + " with properties " + keyTableDesc.getProperties(), e); groupKey.set(keyWritable.get(), 0, keyWritable.getSize()); if (LOG.isTraceEnabled()) { LOG.trace("Start Group"); + tag + ") from " + Utilities.formatBinaryString(valueWritable.get(), 0, valueWritable.getSize()) + " with properties " + valueTableDesc[tag].getProperties(), e);
// NOTE(review): fragment — tail of an exception-construction expression: the
// message formats the reduce value bytes and the value table properties for
// the given tag, and the trailing `, e` passes the original exception as the
// cause. The enclosing throw/new expression begins outside this view. Kept
// byte-identical.
+ tag + ") from " + Utilities.formatBinaryString(valueWritable.get(), 0, valueWritable.getSize()) + " with properties " + valueTableDesc[tag].getProperties(), e);
// NOTE(review): garbled fragment — same fused shape as the chunk above but
// guarding the trace log with a cached `isTraceEnabled` flag instead of
// LOG.isTraceEnabled(): key tag-strip and shrink, a HiveException error path
// (wrapping cause `e`, message formatting the bad key bytes plus key table
// properties), group-key publication, a cut-off trace-guard block, and a
// dangling "+ tag + …" continuation from a value-deserialization error
// message. Kept byte-identical; do not edit without the full method in view.
tag = keyWritable.get()[size]; keyWritable.setSize(size); throw new HiveException( "Hive Runtime Error: Unable to deserialize reduce input key from " + Utilities.formatBinaryString(keyWritable.get(), 0, keyWritable.getSize()) + " with properties " + keyTableDesc.getProperties(), e); groupKey.set(keyWritable.get(), 0, keyWritable.getSize()); if (isTraceEnabled) { LOG.trace("Start Group"); + tag + ") from " + Utilities.formatBinaryString(valueWritable.get(), 0, valueWritable.getSize()) + " with properties " + valueTableDesc[tag].getProperties(), e);