/**
 * Creates an adder wired to the serialization facilities of the given task context.
 *
 * @param ctx Task context.
 * @throws IgniteCheckedException If failed.
 */
protected AdderBase(HadoopTaskContext ctx) throws IgniteCheckedException {
    valSer = ctx.valueSerialization();
    keySer = ctx.keySerialization();

    out = new HadoopDataOutStream(mem) {
        /** {@inheritDoc} */
        @Override public long move(long size) {
            long ptr = super.move(size);

            // Fast path: the current page had enough free space.
            if (ptr != 0)
                return ptr;

            // Slow path: no room left on the current page, allocate a fresh one.
            ptr = allocateNextPage(size);

            assert ptr != 0;

            return ptr;
        }
    };
}
/**
 * Constructor.
 *
 * @param flushSize Flush size.
 * @param gzip Whether to perform GZIP.
 * @param taskCtx Task context.
 * @throws IgniteCheckedException If failed.
 */
public HadoopDirectDataOutputContext(int flushSize, boolean gzip, HadoopTaskContext taskCtx)
    throws IgniteCheckedException {
    this.flushSize = flushSize;
    this.gzip = gzip;

    keySer = taskCtx.keySerialization();
    valSer = taskCtx.valueSerialization();

    out = new HadoopDirectDataOutput(flushSize);

    if (gzip) {
        // Compressed output is expected to be much smaller than the raw one,
        // so start with a fraction of the flush size (but never below the minimum).
        int gzipAllocSize = Math.max(flushSize / 8, GZIP_OUT_MIN_ALLOC_SIZE);

        gzipOut = new HadoopDirectDataOutput(gzipAllocSize);
    }
}
/** * @param taskCtx Task context. * @throws IgniteCheckedException If failed. */ private Input(HadoopTaskContext taskCtx) throws IgniteCheckedException { keyReader = new Reader(taskCtx.keySerialization()); valReader = new Reader(taskCtx.valueSerialization()); }
/** * @param taskCtx Task context. * @throws IgniteCheckedException If failed. */ public Input(HadoopTaskContext taskCtx) throws IgniteCheckedException { cap = capacity(); keyReader = new Reader(taskCtx.keySerialization()); valReader = new Reader(taskCtx.valueSerialization()); }
null)); HadoopSerialization ser = taskCtx.keySerialization();
HadoopSerialization keySer = taskCtx.keySerialization(); HadoopSerialization valSer = taskCtx.valueSerialization();