@Override
public List<WorkUnit> getWorkunits(SourceState state) {
  // If no input format class has been configured, default it to the one
  // advertised by the deserializer's SerDe wrapper before delegating.
  boolean inputFormatConfigured = state.contains(HadoopFileInputSource.FILE_INPUT_FORMAT_CLASS_KEY);
  if (!inputFormatConfigured) {
    String inputFormatClassName = HiveSerDeWrapper.getDeserializer(state).getInputFormatClassName();
    state.setProp(HadoopFileInputSource.FILE_INPUT_FORMAT_CLASS_KEY, inputFormatClassName);
  }
  return super.getWorkunits(state);
}
/**
 * Initializes this converter's Hive serializer and deserializer from the work unit state.
 *
 * <p>The deserializer is initialized first so {@code setColumnsIfPossible} can run before the
 * serializer is initialized.
 *
 * @param state the work unit state supplying SerDe configuration properties
 * @return this converter, for call chaining
 * @throws RuntimeException wrapping any {@link IOException} (SerDe instantiation failure) or
 *         {@link SerDeException} (SerDe initialization failure)
 */
@Override
public HiveSerDeConverter init(WorkUnitState state) {
  super.init(state);
  Configuration conf = HadoopUtils.getConfFromState(state);

  try {
    this.serializer = HiveSerDeWrapper.getSerializer(state).getSerDe();
    this.deserializer = HiveSerDeWrapper.getDeserializer(state).getSerDe();
    this.deserializer.initialize(conf, state.getProperties());
    setColumnsIfPossible(state);
    this.serializer.initialize(conf, state.getProperties());
  } catch (IOException e) {
    log.error("Failed to instantiate serializer and deserializer", e);
    // Throwables.propagate is deprecated; for a checked exception it always
    // wrapped in RuntimeException, so this is behavior-identical.
    throw new RuntimeException(e);
  } catch (SerDeException e) {
    log.error("Failed to initialize serializer and deserializer", e);
    throw new RuntimeException(e);
  }

  return this;
}
@Override
public List<WorkUnit> getWorkunits(SourceState state) {
  // Ensure an input format class is present in the state; fall back to the
  // class name reported by the deserializer's SerDe wrapper when absent.
  if (!state.contains(HadoopFileInputSource.FILE_INPUT_FORMAT_CLASS_KEY)) {
    HiveSerDeWrapper deserializerWrapper = HiveSerDeWrapper.getDeserializer(state);
    state.setProp(
        HadoopFileInputSource.FILE_INPUT_FORMAT_CLASS_KEY,
        deserializerWrapper.getInputFormatClassName());
  }
  return super.getWorkunits(state);
}
/**
 * Initializes this converter's Hive serializer and deserializer from the work unit state.
 *
 * <p>Initialization order matters: the deserializer is initialized before
 * {@code setColumnsIfPossible} runs, and the serializer is initialized afterwards.
 *
 * @param state the work unit state supplying SerDe configuration properties
 * @return this converter, for call chaining
 * @throws RuntimeException wrapping any {@link IOException} (SerDe instantiation failure) or
 *         {@link SerDeException} (SerDe initialization failure)
 */
@Override
public HiveSerDeConverter init(WorkUnitState state) {
  super.init(state);
  Configuration conf = HadoopUtils.getConfFromState(state);

  try {
    this.serializer = HiveSerDeWrapper.getSerializer(state).getSerDe();
    this.deserializer = HiveSerDeWrapper.getDeserializer(state).getSerDe();
    this.deserializer.initialize(conf, state.getProperties());
    setColumnsIfPossible(state);
    this.serializer.initialize(conf, state.getProperties());
  } catch (IOException e) {
    log.error("Failed to instantiate serializer and deserializer", e);
    // Replaces deprecated Throwables.propagate(e): for checked exceptions it
    // always wrapped in RuntimeException, so behavior is unchanged.
    throw new RuntimeException(e);
  } catch (SerDeException e) {
    log.error("Failed to initialize serializer and deserializer", e);
    throw new RuntimeException(e);
  }

  return this;
}