/**
 * Wraps native split.
 *
 * @param id Split ID.
 * @param split Split.
 * @param hosts Hosts.
 * @return Wrapper around the serialized native split.
 * @throws IOException If failed.
 */
public static HadoopSplitWrapper wrapSplit(int id, Object split, String[] hosts) throws IOException {
    assert split instanceof Writable;

    ByteArrayOutputStream arr = new ByteArrayOutputStream();

    // try-with-resources guarantees the object stream is flushed and closed
    // even if Writable.write() throws (original leaked the stream on failure).
    try (ObjectOutput out = new ObjectOutputStream(arr)) {
        ((Writable)split).write(out);

        out.flush();
    }

    return new HadoopSplitWrapper(id, split.getClass().getName(), arr.toByteArray(), hosts);
}
/**
 * Unwraps native split.
 *
 * @param o Wrapper.
 * @return Split.
 * @throws IllegalStateException If the split class cannot be loaded, instantiated or deserialized.
 */
public static Object unwrapSplit(HadoopSplitWrapper o) {
    try {
        // Instantiate via the no-arg constructor reflectively: Class.newInstance()
        // is deprecated and silently propagates undeclared checked exceptions
        // thrown by the constructor.
        Writable w = (Writable)HadoopUtils.class.getClassLoader()
            .loadClass(o.className())
            .getDeclaredConstructor()
            .newInstance();

        w.readFields(new ObjectInputStream(new ByteArrayInputStream(o.bytes())));

        return w;
    }
    catch (Exception e) {
        // Preserve the original cause for diagnostics.
        throw new IllegalStateException(e);
    }
}
// NOTE(review): this line looks like a stray duplicate of the anonymous Callable
// body used inside testSerialization() below — presumably a copy/extraction
// artifact; verify against the original file before removing it.
@Override public Object call() throws Exception { res.hosts(); return null; } }, AssertionError.class, null);
/**
 * Tests serialization of wrapper and the wrapped native split.
 *
 * @throws Exception If fails.
 */
@Test
public void testSerialization() throws Exception {
    FileSplit fileSplit = new FileSplit(new Path("/path/to/file"), 100, 500,
        new String[] {"host1", "host2"});

    assertEquals("/path/to/file:100+500", fileSplit.toString());

    HadoopSplitWrapper wrapped = HadoopUtils.wrapSplit(10, fileSplit, fileSplit.getLocations());

    assertEquals("[host1, host2]", Arrays.toString(wrapped.hosts()));

    // Round-trip the wrapper through standard Java serialization.
    ByteArrayOutputStream byteOut = new ByteArrayOutputStream();

    ObjectOutput objOut = new ObjectOutputStream(byteOut);

    objOut.writeObject(wrapped);

    ObjectInput objIn = new ObjectInputStream(new ByteArrayInputStream(byteOut.toByteArray()));

    final HadoopSplitWrapper restored = (HadoopSplitWrapper)objIn.readObject();

    // The native split must survive the round trip intact.
    assertEquals("/path/to/file:100+500", HadoopUtils.unwrapSplit(restored).toString());

    // Accessing hosts on the deserialized wrapper is expected to fail with an assertion.
    GridTestUtils.assertThrows(log, new Callable<Object>() {
        @Override public Object call() throws Exception {
            restored.hosts();

            return null;
        }
    }, AssertionError.class, null);
}