Refine search
/**
 * Opens every input MapFile reader and the single output writer.
 * All inputs must share one key class and one value class; if a comparator
 * was pre-set, its key class must match as well.
 */
@SuppressWarnings("unchecked")
private void open(Path[] inMapFiles, Path outMapFile) throws IOException {
  inReaders = new Reader[inMapFiles.length];
  int idx = 0;
  for (Path inFile : inMapFiles) {
    Reader in = new Reader(inFile, conf);
    if (keyClass == null || valueClass == null) {
      // First reader fixes the expected key/value classes.
      keyClass = (Class<WritableComparable>) in.getKeyClass();
      valueClass = (Class<Writable>) in.getValueClass();
    } else if (keyClass != in.getKeyClass() || valueClass != in.getValueClass()) {
      throw new HadoopIllegalArgumentException(
          "Input files cannot be merged as they"
              + " have different Key and Value classes");
    }
    inReaders[idx++] = in;
  }

  if (comparator == null) {
    // Derive a comparator from the discovered key class.
    this.comparator =
        WritableComparator.get(keyClass.asSubclass(WritableComparable.class), conf);
  } else if (comparator.getKeyClass() != keyClass) {
    throw new HadoopIllegalArgumentException(
        "Input files cannot be merged as they"
            + " have different Key class compared to"
            + " specified comparator");
  }

  outWriter = new MapFile.Writer(conf, outMapFile,
      MapFile.Writer.keyClass(keyClass),
      MapFile.Writer.valueClass(valueClass));
}
/** Append a value to the file. */
// The record is keyed by the running record count, which is then advanced.
// NOTE(review): `count` is a field declared outside this view — presumably a
// LongWritable-style counter; confirm its type before relying on overflow range.
public synchronized void append(Writable value) throws IOException {
  super.append(count, value);  // add to map, keyed by the current count
  count.set(count.get()+1);    // increment count for the next record
}
}
Path out = FileOutputFormat.getOutputPath(job); Path text = new Path(new Path(out, ParseText.DIR_NAME), name); Path data = new Path(new Path(out, ParseData.DIR_NAME), name); Path linkdb = new Path(new Path(new Path(out, NutchData.getLinkDbName()), "current"), name); new MapFile.Writer(job, fs, text.toString(), Text.class, ParseText.class, CompressionType.RECORD, progress); new MapFile.Writer(job, fs, data.toString(), Text.class, ParseData.class, compType, progress); new MapFile.Writer(job, fs, linkdb.toString(), Text.class, Inlinks.class, compType, progress);
/**
 * Switches this store into write mode: any open reader is closed and
 * released, and the writer is created lazily on first use.
 */
private void prepareWriter() throws IOException {
  if (reader != null) {
    reader.close();
    reader = null; // drop the reader so reads reopen later with fresh state
  }
  if (writer != null) {
    return; // writer already prepared
  }
  writer = new MapFile.Writer(conf, fs, qualifiedDirName.toString(),
      Text.class, Text.class);
}
/**
 * Appends the pair via the parent writer, then folds the serialized key into
 * the bloom filter so membership probes can later consult it.
 */
@Override
public synchronized void append(WritableComparable key, Writable val)
    throws IOException {
  super.append(key, val);
  buf.reset();                                  // reuse the shared serialization buffer
  key.write(buf);                               // serialize the key into buf
  bloomKey.set(byteArrayForBloomKey(buf), 1.0); // bloom key built from the raw key bytes, weight 1.0
  bloomFilter.add(bloomKey);
}
/**
 * Releases every input reader (quietly) and then the output writer.
 * References are nulled as they are closed, so a repeated call is a no-op.
 */
private void close() throws IOException {
  for (int r = 0; r < inReaders.length; r++) {
    IOUtils.closeStream(inReaders[r]); // swallows close errors on inputs
    inReaders[r] = null;
  }
  if (outWriter != null) {
    outWriter.close(); // may throw; writer errors must not be hidden
    outWriter = null;
  }
}
}
/** Append a key/value pair to the map. The key must be greater or equal
 * to the previous key added to the map. */
public synchronized void append(WritableComparable key, Writable val)
    throws IOException {
  checkKey(key); // enforces the ascending-key contract above
  long pos = data.getLength();
  // Only write an index if we've changed positions. In a block compressed
  // file, this means we write an entry at the start of each block
  // (the pos > lastIndexPos guard suppresses duplicate index entries while
  // the data file's length has not advanced).
  if (size >= lastIndexKeyCount + indexInterval && pos > lastIndexPos) {
    position.set(pos); // point to current eof
    index.append(key, position);
    lastIndexPos = pos;       // remember where the last index entry points
    lastIndexKeyCount = size; // ... and after how many records it was taken
  }
  data.append(key, val); // append key/value to data
  size++;
}
.asSubclass(Writable.class), conf); try (MapFile.Writer writer = new MapFile.Writer(conf, fs, out, reader.getKeyClass().asSubclass(WritableComparable.class), reader.getValueClass())) { while (reader.next(key, value)) { // copy all entries writer.append(key, value);
outWriter.append(currentKey, currentValue);
/** Append a key/value pair to the map. The key must be greater or equal
 * to the previous key added to the map. */
public synchronized void append(WritableComparable key, Writable val)
    throws IOException {
  checkKey(key); // enforces the ascending-key contract above
  if (size % indexInterval == 0) { // add an index entry every indexInterval records
    // NOTE(review): if the data file is block-compressed, getLength() may not
    // advance between appends, so consecutive index entries can share one
    // position — compare with the variant that tracks lastIndexPos; confirm.
    position.set(data.getLength()); // point to current eof
    index.append(key, position);
  }
  data.append(key, val); // append key/value to data
  size++;
}
/**
 * Writes the entry through the superclass, then registers the key with the
 * bloom filter. The serialization buffer is a shared field, hence the
 * reset-before-write sequence; the whole method is synchronized.
 */
@Override
public synchronized void append(WritableComparable key, Writable val)
    throws IOException {
  super.append(key, val);
  buf.reset();                                  // clear previous key bytes
  key.write(buf);                               // serialize this key
  bloomKey.set(byteArrayForBloomKey(buf), 1.0); // wrap bytes as a bloom Key (weight 1.0)
  bloomFilter.add(bloomKey);
}
/**
 * Delegates the append, then adds the key's serialized form to the bloom
 * filter so it reflects every key ever written through this writer.
 */
@Override
public synchronized void append(WritableComparable key, Writable val)
    throws IOException {
  super.append(key, val);
  // Order matters below: reset the shared buffer before serializing into it.
  buf.reset();
  key.write(buf);
  bloomKey.set(byteArrayForBloomKey(buf), 1.0); // weight 1.0 per key
  bloomFilter.add(bloomKey);
}
/** Append a value to the file. */
// Keys are implicit: each value is stored under the current record count,
// and the count is bumped afterwards so records are numbered 0, 1, 2, ...
public synchronized void append(Writable value) throws IOException {
  super.append(count, value); // add to map under the current count
  count.set(count.get()+1);   // increment count
}
}
/**
 * Creates a MapFile at {@code path} holding {@code records} entries whose key
 * and value are both the zero-padded record index ("000", "001", ...).
 *
 * @param conf    configuration handed to the writer
 * @param fs      filesystem handle (unused here; kept for call-site compatibility)
 * @param path    destination of the MapFile
 * @param codec   compression codec applied to the data
 * @param type    compression type (NONE / RECORD / BLOCK)
 * @param records number of key/value pairs to write
 * @throws IOException if the writer cannot be created or an append fails
 */
private static void createMapFile(Configuration conf, FileSystem fs, Path path,
    CompressionCodec codec, CompressionType type, int records)
    throws IOException {
  // try-with-resources guarantees the writer is closed even when an append
  // throws — the original leaked the writer (and its streams) on failure.
  try (MapFile.Writer writer = new MapFile.Writer(conf, path,
      MapFile.Writer.keyClass(Text.class),
      MapFile.Writer.valueClass(Text.class),
      MapFile.Writer.compression(type, codec))) {
    Text key = new Text();
    for (int j = 0; j < records; j++) {
      key.set(String.format("%03d", j)); // fixed-width keys keep ordering lexicographic
      writer.append(key, key);
    }
  }
}
/**
 * Closes all input readers and, if open, the output writer.
 * Input streams are closed quietly via IOUtils.closeStream and nulled out,
 * making the method idempotent; outWriter.close() may still throw.
 */
private void close() throws IOException {
  for (int i = 0; i < inReaders.length; i++) {
    IOUtils.closeStream(inReaders[i]); // best-effort close, errors suppressed
    inReaders[i] = null;
  }
  if (outWriter != null) {
    outWriter.close();
    outWriter = null;
  }
}
}
/** Append a value to the file. */ public synchronized void append(Writable value) throws IOException { super.append(count, value); // add to map count.set(count.get()+1); // increment count } }
/**
 * Appends the pair and mirrors the key into the bloom filter. Uses the
 * writer's shared buffer/key fields, so the synchronized modifier also
 * protects that scratch state.
 */
@Override
public synchronized void append(WritableComparable key, Writable val)
    throws IOException {
  super.append(key, val);
  buf.reset();                                  // scrub stale bytes from the shared buffer
  key.write(buf);                               // capture this key's serialized bytes
  bloomKey.set(byteArrayForBloomKey(buf), 1.0); // build the bloom Key, weight 1.0
  bloomFilter.add(bloomKey);
}