/**
 * Sets an output configuration key/value pair on the wrapped target.
 *
 * @param key the configuration key to set
 * @param value the value to associate with the key
 * @return this instance, for method chaining
 */
@Override
public Target outputConf(String key, String value) {
  target.outputConf(key, value);
  return this;
}
/**
 * Configure the given output target to be compressed using the given codec.
 *
 * @param target the output target to configure
 * @param codecClass the Hadoop compression codec class to use for output
 * @return the same target, with compression configuration applied
 */
public static <T extends Target> T compress(T target, Class<? extends CompressionCodec> codecClass) {
  // Use getName() (the JVM binary name) rather than getCanonicalName(): Hadoop loads the
  // configured codec reflectively (Class.forName semantics), which requires '$'-separated
  // names for nested classes. The two are identical for top-level codec classes.
  return (T) target.outputConf("mapred.output.compress", "true")
      .outputConf("mapred.output.compression.codec", codecClass.getName());
}
/**
 * Propagates a configuration key/value pair to both sides of this source-target:
 * as input configuration on the wrapped source and as output configuration on
 * the wrapped target.
 *
 * @param key the configuration key to set
 * @param value the value to associate with the key
 * @return this instance, for method chaining
 */
@Override
public SourceTarget<T> conf(String key, String value) {
  source.inputConf(key, value);
  target.outputConf(key, value);
  return this;
}
}
/**
 * Configure the given output target to be compressed using Gzip.
 *
 * <p>NOTE(review): Avro output is additionally pointed at Avro's deflate codec here,
 * presumably because Avro data files have no gzip codec and deflate is the closest
 * equivalent — confirm against the Avro file-format spec.
 *
 * @param target the output target to configure
 * @return the same target, with gzip compression configuration applied
 */
public static <T extends Target> T gzip(T target) {
  T compressed = compress(target, GzipCodec.class);
  return (T) compressed.outputConf(AvroJob.OUTPUT_CODEC, DataFileConstants.DEFLATE_CODEC);
}
/**
 * Configure the given output target to be compressed using Snappy. If the Target is one of the
 * AvroParquet targets contained in Crunch, the Parquet-specific SnappyCodec will be used instead
 * of the default Hadoop one.
 *
 * @param target the output target to configure
 * @return the same target, with snappy compression configuration applied
 */
public static <T extends Target> T snappy(T target) {
  // Parquet-backed targets need Parquet's own SnappyCodec implementation.
  boolean parquetTarget =
      target instanceof AvroParquetFileTarget || target instanceof AvroParquetFileSourceTarget;
  Class<? extends CompressionCodec> codec = parquetTarget
      ? org.apache.parquet.hadoop.codec.SnappyCodec.class
      : org.apache.hadoop.io.compress.SnappyCodec.class;
  return (T) compress(target, codec)
      .outputConf(AvroJob.OUTPUT_CODEC, DataFileConstants.SNAPPY_CODEC);
}
}