/**
 * Task-end hook for this WAL-replay mapper. Performs no work of its own
 * beyond the framework's default cleanup.
 */
@Override
protected void cleanup(Mapper<WALKey, WALEdit, ImmutableBytesWritable, Mutation>.Context context)
    throws IOException, InterruptedException {
  super.cleanup(context);
}
/**
 * Finishes the map task: runs the framework cleanup first, then reports the
 * metrics gathered during this worker iteration back to the job.
 */
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  super.cleanup(context);
  // Publish per-worker metrics once the framework teardown has completed.
  job.workerIterationEnd(metrics);
}
/** {@inheritDoc} */
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  super.cleanup(context);
  // Give the error simulator its hook to inject a failure at map-cleanup time.
  HadoopErrorSimulator.instance().onMapCleanup();
}
/**
 * Releases the JDBC connection when the map task finishes.
 *
 * <p>Fix: the original logged only {@code e.getMessage()} via a placeholder,
 * discarding the stack trace and cause chain. The exception object itself is
 * now handed to the logger. Close failures are still not rethrown, so a
 * failed close cannot mask an otherwise successful task.
 */
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  super.cleanup(context);
  if (connection != null) {
    try {
      connection.close();
    } catch (SQLException e) {
      // Pass the throwable so the full stack trace reaches the log.
      LOG.error("Error while closing connection in the PhoenixIndexMapper class ", e);
    }
  }
}
}
/**
 * Flushes the final mutation batch and releases both JDBC connections.
 *
 * <p>Fix: in the original, a failure in {@code processBatch} or
 * {@code connection.close()} skipped {@code outputConn.close()}, leaking the
 * output connection. All three steps are now attempted; the first failure is
 * rethrown as an {@link IOException} with later failures attached as
 * suppressed exceptions.
 *
 * @throws IOException if the final batch flush or a connection close fails
 */
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  super.cleanup(context);
  if (connection == null) {
    // Nothing was opened; matches the original's no-op path.
    return;
  }
  SQLException failure = null;
  try {
    processBatch(context);
  } catch (SQLException e) {
    failure = e;
  }
  try {
    connection.close();
  } catch (SQLException e) {
    if (failure == null) {
      failure = e;
    } else {
      failure.addSuppressed(e);
    }
  }
  if (outputConn != null) {
    try {
      outputConn.close();
    } catch (SQLException e) {
      if (failure == null) {
        failure = e;
      } else {
        failure.addSuppressed(e);
      }
    }
  }
  if (failure != null) {
    LOG.error("Error while closing connection in the PhoenixIndexMapper class ", failure);
    throw new IOException(failure);
  }
}
// NOTE(review): incomplete fragment — the enclosing method's opening lines are
// not visible in this chunk, so only this tail of its try/catch can be reviewed.
// NOTE(review): only e.getMessage() is logged here, which drops the stack trace;
// prefer passing the exception object itself to LOG.error — confirm against the
// full method before changing.
super.cleanup(context); } catch (SQLException e) { LOG.error(" Error {} while read/write of a record ", e.getMessage());
/**
 * Closes the task's output stream after the framework cleanup runs.
 *
 * <p>Fix: the close now sits in a {@code finally} block so the stream is
 * released even when {@code super.cleanup} throws.
 *
 * @throws IOException if the framework cleanup or the stream close fails
 */
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  try {
    super.cleanup(context);
  } finally {
    if (outputStream != null) {
      outputStream.close();
    }
  }
}
}
/**
 * Emits the most distant point recorded for each cluster id, then delegates
 * to the framework cleanup.
 */
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  for (Map.Entry<Integer, WeightedVectorWritable> e : mostDistantPoints.entrySet()) {
    IntWritable clusterId = new IntWritable(e.getKey());
    context.write(clusterId, e.getValue());
  }
  super.cleanup(context);
}
/**
 * Finishes the triangulation task: runs the framework cleanup, then closes
 * the underlying writer.
 *
 * <p>Fix: the writer close now sits in a {@code finally} block so the
 * resource is released even when {@code super.cleanup} throws.
 */
@Override
protected void cleanup(
    Mapper<Rectangle, Iterable<S>, IntWritable, Triangulation>.Context context)
    throws IOException, InterruptedException {
  try {
    super.cleanup(context);
  } finally {
    writer.close(context);
  }
}
}
/**
 * Emits the final aggregated result once the framework cleanup has run.
 */
@Override
protected void cleanup(Mapper<Path, BytesWritable, Text, Text>.Context context)
    throws IOException, InterruptedException {
  log.debug("Cleaning up and emitting final result...");
  super.cleanup(context);
  emit(context);
}
/**
 * Flags that the map-side cleanup phase ran, then delegates to the
 * framework hook.
 */
protected void cleanup(Context context) throws IOException, InterruptedException {
  // Record the fact for later inspection (e.g. by a test harness).
  mapCleanup = true;
  super.cleanup(context);
}
}
/**
 * Marks that the mapper's cleanup phase executed, then defers to the
 * framework implementation. The raw {@code Context} type is required by the
 * enclosing class, hence the suppressed warnings.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
protected void cleanup(org.apache.hadoop.mapreduce.Mapper.Context context)
    throws IOException, InterruptedException {
  mapCleanup = true;
  super.cleanup(context);
}
/**
 * Releases the HBase connection, then delegates to the framework cleanup.
 *
 * <p>Fix: {@code super.cleanup} is now guaranteed to run even when closing
 * the HBase connection throws.
 */
@Override
protected void cleanup(
    Mapper<JobFile, FileStatus, ImmutableBytesWritable, Put>.Context context)
    throws IOException, InterruptedException {
  try {
    if (hbaseConnection != null) {
      hbaseConnection.close();
    }
  } finally {
    super.cleanup(context);
  }
}
/**
 * Runs the framework cleanup, then flushes any Avro feature batches still
 * buffered in this mapper.
 */
@Override
protected void cleanup(
    final Mapper<GeoWaveInputKey, SimpleFeature, AvroKey<AvroSimpleFeatureCollection>, NullWritable>.Context context)
    throws IOException, InterruptedException {
  super.cleanup(context);
  writeRemainingAvroBatches(context);
}
/**
 * Closes the OSM provider, then delegates to the framework cleanup.
 *
 * <p>Fix: {@code super.cleanup} is now guaranteed to run even when
 * {@code osmProvider.close()} throws.
 */
@Override
protected void cleanup(final Context context) throws IOException, InterruptedException {
  try {
    osmProvider.close();
  } finally {
    super.cleanup(context);
  }
}
/**
 * Records the task's average inbound bandwidth (bytes per second) in the
 * job counters before finishing.
 */
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  super.cleanup(context);
  // Elapsed wall-clock seconds; clamp the divisor to 1 so the rate is
  // defined even when the task finished within the same second it started.
  long elapsedSecs = (System.currentTimeMillis() - startEpoch) / 1000;
  long divisor = elapsedSecs == 0 ? 1 : elapsedSecs;
  incrementCounter(context, Counter.BANDWIDTH_IN_BYTES, totalBytesCopied / divisor);
}
}
/**
 * Emits the accumulated per-feature and per-label weight vectors (when
 * present), then delegates to the framework cleanup.
 */
@Override
protected void cleanup(Context ctx) throws IOException, InterruptedException {
  if (weightsPerFeature != null) {
    VectorWritable featureWeights = new VectorWritable(weightsPerFeature);
    VectorWritable labelWeights = new VectorWritable(weightsPerLabel);
    ctx.write(new Text(TrainNaiveBayesJob.WEIGHTS_PER_FEATURE), featureWeights);
    ctx.write(new Text(TrainNaiveBayesJob.WEIGHTS_PER_LABEL), labelWeights);
  }
  super.cleanup(ctx);
}
}
/**
 * Writes the per-label theta normalizer vector, then runs the framework
 * cleanup.
 */
@Override
protected void cleanup(Context ctx) throws IOException, InterruptedException {
  Text key = new Text(TrainNaiveBayesJob.LABEL_THETA_NORMALIZER);
  VectorWritable normalizer =
      new VectorWritable(trainer.retrievePerLabelThetaNormalizer());
  ctx.write(key, normalizer);
  super.cleanup(ctx);
}
}
/**
 * Shuts down the downloader and vectorizer helpers once the framework
 * cleanup has completed.
 */
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  super.cleanup(context);
  downloader.shutDown();
  vectorizer.shutDown();
}
/** {@inheritDoc} */
@Override
protected void cleanup(Context hadoopContext) throws IOException, InterruptedException {
  // Close the writer, then release the table and the Fiji handle; each call
  // logs (rather than propagates) its own failure.
  ResourceUtils.closeOrLog(mTableWriter);
  ResourceUtils.releaseOrLog(mTable);
  ResourceUtils.releaseOrLog(mFiji);
  super.cleanup(hadoopContext);
}
}