Refine search
/**
 * Defines the sort order of the BytesWritable.
 *
 * @param that the other bytes writable to compare against
 * @return a positive value if this is bigger than {@code that}, 0 if they are
 *         equal, and a negative value if this is smaller than {@code that}.
 */
@Override
public int compareTo(ImmutableBytesWritable that) {
  final byte[] left = this.bytes;
  final byte[] right = that.bytes;
  // Lexicographic, unsigned byte-wise comparison over each side's window.
  return WritableComparator.compareBytes(
      left, this.offset, this.length,
      right, that.offset, that.length);
}
@SuppressWarnings("rawtypes") private int compareKey(final WritableComparator comparators[], final int pos, final WritableComparable key_1, final WritableComparable key_2, final boolean nullsafe) { if (key_1 == null && key_2 == null) { if (nullsafe) { return 0; } else { return -1; } } else if (key_1 == null) { return -1; } else if (key_2 == null) { return 1; } if (comparators[pos] == null) { comparators[pos] = WritableComparator.get(key_1.getClass()); } return comparators[pos].compare(key_1, key_2); }
/**
 * Calculates a hash code over the first {@code length} bytes of {@code b}.
 *
 * @param b value
 * @param length length of the value
 * @return the result of running
 *         {@link WritableComparator#hashBytes(byte[], int)} on the passed-in
 *         array; this is the same hashing that
 *         {@link org.apache.hadoop.io.Text} uses when calculating its hash
 *         code.
 */
public static int hashCode(final byte [] b, final int length) {
  return WritableComparator.hashBytes(b, length);
}
/**
 * Pass skip key to child RRs.
 *
 * <p>Every child reader whose current key is at or before {@code key} is
 * pulled off the queue, told to skip, and re-queued only if it still has
 * records.
 *
 * @param key the key to skip to
 * @throws IOException if a child reader fails while skipping
 */
public void skip(K key) throws IOException {
  // Drain readers that have not advanced past the key; they must leave the
  // priority queue before their position changes so heap order stays valid.
  final ArrayList<ComposableRecordReader<K, ?>> drained =
      new ArrayList<ComposableRecordReader<K, ?>>();
  while (!q.isEmpty() && cmp.compare(q.peek().key(), key) <= 0) {
    drained.add(q.poll());
  }
  // Skip each drained reader, re-queueing only the non-exhausted ones.
  for (ComposableRecordReader<K, ?> rr : drained) {
    rr.skip(key);
    if (rr.hasNext()) {
      q.add(rr);
    }
  }
}
/**
 * Raw {@link java.util.Comparator} bridge: casts both arguments to
 * {@code WritableComparable} and delegates to the typed compare overload.
 * Throws {@code ClassCastException} if either argument is not a
 * {@code WritableComparable}.
 */
@Override
public int compare(Object a, Object b) {
  return compare((WritableComparable)a, (WritableComparable)b);
}
// NOTE(review): truncated fragment — the enclosing constructor/method
// continues beyond this view, so the else-branch below is incomplete here.
// Either a comparator or a key class was supplied via options.
if (keyClassOption == null) {
  // Comparator given explicitly; derive the key class from it.
  this.comparator = comparatorOption.getValue();
  keyClass = comparator.getKeyClass();
} else {
  // Key class given; look up its comparator (and configure it).
  keyClass= (Class<? extends WritableComparable>) keyClassOption.getValue();
  this.comparator = WritableComparator.get(keyClass, conf);
  this.lastKey = comparator.newKey();
  FileSystem fs = dirName.getFileSystem(conf);
/**
 * Create a set naming the element class and compression type.
 *
 * <p>Delegates to the comparator-based constructor, using the registered
 * {@link WritableComparator} for {@code keyClass}.
 *
 * @param conf configuration to use
 * @param fs filesystem to write to
 * @param dirName directory that will hold the set's files
 * @param keyClass class of the keys to be stored
 * @param compress sequence-file compression type
 * @throws IOException if the underlying writer cannot be created
 */
public Writer(Configuration conf, FileSystem fs, String dirName,
    Class<? extends WritableComparable> keyClass,
    SequenceFile.CompressionType compress) throws IOException {
  this(conf, fs, dirName, WritableComparator.get(keyClass, conf), compress);
}
@SuppressWarnings("unchecked") private void open(Path[] inMapFiles, Path outMapFile) throws IOException { inReaders = new Reader[inMapFiles.length]; for (int i = 0; i < inMapFiles.length; i++) { Reader reader = new Reader(inMapFiles[i], conf); if (keyClass == null || valueClass == null) { keyClass = (Class<WritableComparable>) reader.getKeyClass(); valueClass = (Class<Writable>) reader.getValueClass(); } else if (keyClass != reader.getKeyClass() || valueClass != reader.getValueClass()) { throw new HadoopIllegalArgumentException( "Input files cannot be merged as they" + " have different Key and Value classes"); } inReaders[i] = reader; } if (comparator == null) { Class<? extends WritableComparable> cls; cls = keyClass.asSubclass(WritableComparable.class); this.comparator = WritableComparator.get(cls, conf); } else if (comparator.getKeyClass() != keyClass) { throw new HadoopIllegalArgumentException( "Input files cannot be merged as they" + " have different Key class compared to" + " specified comparator"); } outWriter = new MapFile.Writer(conf, outMapFile, MapFile.Writer.keyClass(keyClass), MapFile.Writer.valueClass(valueClass)); }
/**
 * Test that Writable's are configured by Comparator.
 *
 * <p>Obtains a {@link WritableComparator} with an explicit Configuration and
 * verifies that both the comparator and the key it creates carry that
 * configuration, including the test parameter set on it.
 */
public void testConfigurableWritableComparator() throws Exception {
  Configuration conf = new Configuration();
  conf.set(TEST_WRITABLE_CONFIG_PARAM, TEST_WRITABLE_CONFIG_VALUE);
  WritableComparator wc =
      WritableComparator.get(SimpleWritableComparable.class, conf);
  SimpleWritableComparable key = ((SimpleWritableComparable) wc.newKey());
  assertNotNull(wc.getConf());
  assertNotNull(key.getConf());
  // JUnit convention: expected value first, actual value second, so a
  // failure message reads correctly (original had them swapped).
  assertEquals(TEST_WRITABLE_CONFIG_VALUE,
      key.getConf().get(TEST_WRITABLE_CONFIG_PARAM));
}
}
/**
 * Constructs a comparator for the given key class.
 *
 * @param keyClass class of the keys this comparator orders
 * @param conf configuration to use; a fresh {@code Configuration} is created
 *        when null
 * @param createInstances whether to preallocate the two scratch key
 *        instances and the input buffer (used by deserializing comparisons);
 *        when false those fields are left null
 */
protected WritableComparator(Class<? extends WritableComparable> keyClass,
    Configuration conf, boolean createInstances) {
  this.keyClass = keyClass;
  if (conf != null) {
    this.conf = conf;
  } else {
    this.conf = new Configuration();
  }
  if (createInstances) {
    key1 = newKey();
    key2 = newKey();
    buffer = new DataInputBuffer();
  } else {
    key1 = null;
    key2 = null;
    buffer = null;
  }
}
/** Create the named map using the named key comparator. */ public Writer(Configuration conf, FileSystem fs, String dirName, WritableComparator comparator, Class valClass, SequenceFile.CompressionType compress, CompressionCodec codec, Progressable progress) throws IOException { this.indexInterval = conf.getInt(INDEX_INTERVAL, this.indexInterval); this.comparator = comparator; this.lastKey = comparator.newKey(); Path dir = new Path(dirName); if (!fs.mkdirs(dir)) { throw new IOException("Mkdirs failed to create directory " + dir.toString()); } Path dataFile = new Path(dir, DATA_FILE_NAME); Path indexFile = new Path(dir, INDEX_FILE_NAME); Class keyClass = comparator.getKeyClass(); this.data = SequenceFile.createWriter (fs, conf, dataFile, keyClass, valClass, compress, codec, progress); this.index = SequenceFile.createWriter (fs, conf, indexFile, keyClass, LongWritable.class, CompressionType.BLOCK, progress); }
/**
 * Returns the {@code WritableComparable} subclass this comparator orders;
 * simply exposes the superclass accessor.
 */
public Class<? extends WritableComparable> getKeyClass(){
  return super.getKeyClass();
}
/**
 * Pass skip key to child RRs.
 *
 * <p>Every child reader whose current key is at or before {@code key} is
 * pulled off the queue, told to skip, and re-queued only if it still has
 * records.
 *
 * @param key the key to skip to
 * @throws IOException if a child reader fails while skipping
 */
public void skip(K key) throws IOException {
  // Drain readers that have not advanced past the key; they must leave the
  // priority queue before their position changes so heap order stays valid.
  final ArrayList<ComposableRecordReader<K, ?>> drained =
      new ArrayList<ComposableRecordReader<K, ?>>();
  while (!q.isEmpty() && cmp.compare(q.peek().key(), key) <= 0) {
    drained.add(q.poll());
  }
  // Skip each drained reader, re-queueing only the non-exhausted ones.
  for (ComposableRecordReader<K, ?> rr : drained) {
    rr.skip(key);
    if (rr.hasNext()) {
      q.add(rr);
    }
  }
}
private int binarySearch(WritableComparable key) { int low = 0; int high = count-1; while (low <= high) { int mid = (low + high) >>> 1; WritableComparable midVal = keys[mid]; int cmp = comparator.compare(midVal, key); if (cmp < 0) low = mid + 1; else if (cmp > 0) high = mid - 1; else return mid; // key found } return -(low + 1); // key not found. }
// NOTE(review): truncated fragment — the enclosing constructor/method
// continues beyond this view, so the else-branch below is incomplete here.
// Either a comparator or a key class was supplied via options.
if (keyClassOption == null) {
  // Comparator given explicitly; derive the key class from it.
  this.comparator = comparatorOption.getValue();
  keyClass = comparator.getKeyClass();
} else {
  // Key class given; look up its comparator (and configure it).
  keyClass= (Class<? extends WritableComparable>) keyClassOption.getValue();
  this.comparator = WritableComparator.get(keyClass, conf);
  this.lastKey = comparator.newKey();
  FileSystem fs = dirName.getFileSystem(conf);
/**
 * Sort and merge files containing the named classes.
 *
 * <p>Delegates to the comparator-based constructor, using the registered
 * {@link WritableComparator} for {@code keyClass}.
 *
 * @param fs filesystem to work on
 * @param keyClass class of the keys to sort by
 * @param valClass class of the values
 * @param conf configuration to use
 */
public Sorter(FileSystem fs, Class<? extends WritableComparable> keyClass,
    Class valClass, Configuration conf) {
  this(fs, WritableComparator.get(keyClass, conf), keyClass, valClass, conf);
}
@SuppressWarnings("unchecked") private void open(Path[] inMapFiles, Path outMapFile) throws IOException { inReaders = new Reader[inMapFiles.length]; for (int i = 0; i < inMapFiles.length; i++) { Reader reader = new Reader(inMapFiles[i], conf); if (keyClass == null || valueClass == null) { keyClass = (Class<WritableComparable>) reader.getKeyClass(); valueClass = (Class<Writable>) reader.getValueClass(); } else if (keyClass != reader.getKeyClass() || valueClass != reader.getValueClass()) { throw new HadoopIllegalArgumentException( "Input files cannot be merged as they" + " have different Key and Value classes"); } inReaders[i] = reader; } if (comparator == null) { Class<? extends WritableComparable> cls; cls = keyClass.asSubclass(WritableComparable.class); this.comparator = WritableComparator.get(cls, conf); } else if (comparator.getKeyClass() != keyClass) { throw new HadoopIllegalArgumentException( "Input files cannot be merged as they" + " have different Key class compared to" + " specified comparator"); } outWriter = new MapFile.Writer(conf, outMapFile, MapFile.Writer.keyClass(keyClass), MapFile.Writer.valueClass(valueClass)); }
/**
 * Test that Writable's are configured by Comparator.
 *
 * <p>Obtains a {@link WritableComparator} with an explicit Configuration and
 * verifies that both the comparator and the key it creates carry that
 * configuration, including the test parameter set on it.
 */
public void testConfigurableWritableComparator() throws Exception {
  Configuration conf = new Configuration();
  conf.set(TEST_WRITABLE_CONFIG_PARAM, TEST_WRITABLE_CONFIG_VALUE);
  WritableComparator wc =
      WritableComparator.get(SimpleWritableComparable.class, conf);
  SimpleWritableComparable key = ((SimpleWritableComparable) wc.newKey());
  assertNotNull(wc.getConf());
  assertNotNull(key.getConf());
  // JUnit convention: expected value first, actual value second, so a
  // failure message reads correctly (original had them swapped).
  assertEquals(TEST_WRITABLE_CONFIG_VALUE,
      key.getConf().get(TEST_WRITABLE_CONFIG_PARAM));
}
}