private void flushIndexToContextAndClose(BytesWritable key, IncrementalIndex index, Context context)
    throws IOException, InterruptedException
{
  final List<String> dimensions = index.getDimensionNames();
  Iterator<Row> rows = index.iterator();
  while (rows.hasNext()) {
    context.progress();
    Row row = rows.next();
    InputRow inputRow = getInputRowFromRow(row, dimensions);

    // reportParseExceptions is true as any unparseable data is already handled by the mapper.
    InputRowSerde.SerializeResult serializeResult = InputRowSerde.toBytes(typeHelperMap, inputRow, combiningAggs);

    context.write(
        key,
        new BytesWritable(serializeResult.getSerializedRow())
    );
  }
  index.close();
}
@Override
protected void reduce(final BytesWritable key, Iterable<BytesWritable> values, final Context context)
    throws IOException, InterruptedException
{
  Iterator<BytesWritable> iter = values.iterator();
  BytesWritable first = iter.next();

  if (iter.hasNext()) {
    LinkedHashSet<String> dimOrder = new LinkedHashSet<>();
    SortableBytes keyBytes = SortableBytes.fromBytesWritable(key);
    Bucket bucket = Bucket.fromGroupKey(keyBytes.getGroupKey()).lhs;
    IncrementalIndex index = makeIncrementalIndex(bucket, combiningAggs, config, null, null);
    index.add(InputRowSerde.fromBytes(typeHelperMap, first.getBytes(), aggregators));

    while (iter.hasNext()) {
      context.progress();
      InputRow value = InputRowSerde.fromBytes(typeHelperMap, iter.next().getBytes(), aggregators);

      if (!index.canAppendRow()) {
        dimOrder.addAll(index.getDimensionOrder());
        log.info("current index full due to [%s]. creating new index.", index.getOutOfRowsReason());
        flushIndexToContextAndClose(key, index, context);
        index = makeIncrementalIndex(bucket, combiningAggs, config, dimOrder, index.getColumnCapabilities());
      }

      index.add(value);
    }

    flushIndexToContextAndClose(key, index, context);
  } else {
    context.write(key, first);
  }
}
/**
 * Advanced application writers can use the
 * {@link #run(org.apache.hadoop.mapreduce.Reducer.Context)} method to
 * control how the reduce task works.
 */
public void run(Context context) throws IOException, InterruptedException {
  setup(context);
  while (context.nextKey()) {
    context.progress();
    reduce(context.getCurrentKey(), context.getValues(), context);
  }
  cleanup(context);
}
}
@Override
protected void reduce(IntWritable partitionID, Iterable<Shape> shapes, Context context)
    throws IOException, InterruptedException {
  LOG.info("Working on partition #" + partitionID);
  for (Shape shape : shapes) {
    context.write(partitionID, shape);
    context.progress();
  }
  // Indicate end of partition to close the file
  context.write(new IntWritable(-partitionID.get() - 1), null);
  LOG.info("Done with partition #" + partitionID);
}
}
public static void reduceReport(Reducer.Context context) {
  context.getCounter(MATCH_COUNTERS.REDUCETASKSDONE).increment(1);
  context.progress();
}
@Override
public void reducerProgress() {
  // Throttle progress reports to at most one every five minutes (300000 ms).
  if (reducercontext != null && System.currentTimeMillis() - lastprogress > 300000) {
    reducercontext.progress();
    lastprogress = System.currentTimeMillis();
  }
}
@Override
public void progress() {
  context.progress();
}
}
@Override
public void progress() {
  super.progress();
  context.progress();
}
};
@Override
public void complete(
    final ByteArray id,
    final VALUEIN value,
    final NeighborList<VALUEIN> primaryList) throws IOException, InterruptedException {
  context.progress();
  processNeighbors(key.partitionData, id, value, primaryList, context, summary);
  processor.remove(id);
}
});
@Override
protected void reduce(IntWritable dummy, Iterable<Canvas> intermediateLayers, Context context)
    throws IOException, InterruptedException {
  Canvas finalLayer = plotter.createCanvas(imageWidth, imageHeight, inputMBR);
  for (Canvas intermediateLayer : intermediateLayers) {
    plotter.merge(finalLayer, intermediateLayer);
    context.progress();
  }
  context.write(NullWritable.get(), finalLayer);
}
@Override
protected void reduce(LongWritable tileID, Iterable<Canvas> interLayers, Context context)
    throws IOException, InterruptedException {
  Rectangle tileMBR = TileIndex.getMBR(inputMBR, tileID.get());
  Canvas finalLayer = plotter.createCanvas(tileWidth, tileHeight, tileMBR);
  for (Canvas interLayer : interLayers) {
    plotter.merge(finalLayer, interLayer);
    context.progress();
  }
  context.write(tileID, finalLayer);
}
}
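// A minimal self-contained sketch of the pattern the snippets above share:
// calling context.progress() inside a long-running reduce loop so the
// framework's task timeout (mapreduce.task.timeout) is not triggered while a
// large key group is being processed. The class name, key/value types, and the
// summing logic are illustrative assumptions, not taken from any snippet above.
import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class SumReducer extends Reducer<Text, LongWritable, Text, LongWritable> {
  @Override
  protected void reduce(Text key, Iterable<LongWritable> values, Context context)
      throws IOException, InterruptedException {
    long sum = 0;
    for (LongWritable value : values) {
      sum += value.get();
      // Report liveness to the framework; this does not change the reported
      // progress percentage, it only resets the task's inactivity timer.
      context.progress();
    }
    context.write(key, new LongWritable(sum));
  }
}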