/**
 * Initializes this component from the SerDe parameters: runs the superclass
 * initialization first, then caches the lazy-SerDe parameters and builds the
 * row serializer used for writes.
 */
@Override
public void init(HBaseSerDeParameters hbaseParam, Properties properties) throws SerDeException {
  super.init(hbaseParam, properties);
  this.serializer = new HBaseRowSerializer(hbaseParam);
  this.serdeParams = hbaseParam.getSerdeParams();
}
/**
 * Constructs a LazyHBaseRow backed by the given ObjectInspector, pulling the
 * key factory, per-column value factories, and column mappings out of the
 * SerDe parameters.
 */
public LazyHBaseRow(LazySimpleStructObjectInspector oi, HBaseSerDeParameters serdeParams) {
  super(oi);
  this.columnsMapping = serdeParams.getColumnMappings().getColumnsMapping();
  this.valueFactories = serdeParams.getValueFactories();
  this.keyFactory = serdeParams.getKeyFactory();
}
/**
 * Delegates predicate decomposition to the key factory configured on the
 * HBase SerDe, so pushdown behavior is pluggable per key type.
 */
@Override
public DecomposedPredicate decomposePredicate(
    JobConf jobConf, Deserializer deserializer, ExprNodeDesc predicate) {
  final HBaseSerDe serde = (HBaseSerDe) deserializer;
  return serde.getKeyFactory().decomposePredicate(jobConf, deserializer, predicate);
}
/**
 * Creates the lazy field object for the column identified by {@code fieldID},
 * using the column mappings to pick the appropriate lazy representation.
 */
@Override
public LazyObjectBase createValueObject(ObjectInspector inspector) throws SerDeException {
  final ColumnMapping[] mappings = columnMappings.getColumnsMapping();
  return HBaseSerDeHelper.createLazyField(mappings, fieldID, inspector);
}
/**
 * Test-only constructor: builds a LazyHBaseRow directly from column mappings
 * with a default key factory and no value factories.
 */
@VisibleForTesting
LazyHBaseRow(LazySimpleStructObjectInspector oi, ColumnMappings columnMappings) {
  super(oi);
  this.valueFactories = null;
  this.columnsMapping = columnMappings.getColumnsMapping();
  this.keyFactory = DefaultHBaseKeyFactory.forTest(null, columnMappings);
}
/**
 * Convenience overload of {@code parseColumnsMapping} that disables the
 * hide-prefix option (third argument {@code false}).
 */
public static ColumnMappings parseColumnsMapping(
    String columnsMappingSpec, boolean doColumnRegexMatching) throws SerDeException {
  final boolean hidePrefix = false;
  return parseColumnsMapping(columnsMappingSpec, doColumnRegexMatching, hidePrefix);
}

/**
@Override public void configureOutputJobProperties( TableDesc tableDesc, Map<String, String> jobProperties) { //Output this.configureInputJobProps = false; configureTableJobProperties(tableDesc, jobProperties); }
/**
 * Sets the HBase row data (a {@code Result} writable) for this LazyStruct
 * and marks the row as not yet parsed so fields are re-materialized lazily.
 */
public void init(Result r) {
  result = r;
  setParsed(false);
}
/**
 * Initializes this lazy object from an HBase {@code Result} scoped to one
 * column family, recording the binary-storage flags and the optional
 * qualifier prefix (and whether to hide it), then resets the parsed state.
 */
public void init(
    Result r,
    byte[] columnFamilyBytes,
    List<Boolean> binaryStorage,
    byte[] qualPrefix,
    boolean hideQualPrefix) {
  this.result = r;
  this.isNull = false;
  this.binaryStorage = binaryStorage;
  this.columnFamilyBytes = columnFamilyBytes;
  this.hideQualPrefix = hideQualPrefix;
  this.qualPrefix = qualPrefix;
  setParsed(false);
}
/**
 * Delegates predicate decomposition to the static helper on
 * {@code HBaseStorageHandler}, casting the deserializer to the HBase SerDe.
 */
@Override
public DecomposedPredicate decomposePredicate(JobConf jobConf, Deserializer deserializer,
    ExprNodeDesc predicate) {
  final HBaseSerDe serde = (HBaseSerDe) deserializer;
  return HBaseStorageHandler.decomposePredicate(jobConf, serde, predicate);
}
}
/**
 * Chooses the output format: HFile output when HFile generation is enabled
 * in the job configuration, otherwise the standard HBase table output format.
 */
@Override
public Class<? extends OutputFormat> getOutputFormatClass() {
  return isHBaseGenerateHFiles(jobConf)
      ? HiveHFileOutputFormat.class
      : HiveHBaseTableOutputFormat.class;
}
/**
 * Constructs a split wrapping a regular {@code TableSplit}. A placeholder
 * snapshot split is created alongside it (never null), and the flag records
 * that this instance represents a table split rather than a snapshot split.
 */
public HBaseSplit(TableSplit tableSplit, Path dummyPath) {
  super(dummyPath, 0, 0, (String[]) null);
  this.isTableSplit = true;
  this.tableSplit = tableSplit;
  this.snapshotSplit = HBaseTableSnapshotInputFormatUtil.createTableSnapshotRegionSplit();
}
/**
 * Captures the SerDe parameters, the key-column mapping, and the table
 * properties for later use; performs no other initialization.
 */
@Override
public void init(HBaseSerDeParameters hbaseParam, Properties properties) throws SerDeException {
  this.properties = properties;
  this.hbaseParams = hbaseParam;
  this.keyMapping = hbaseParam.getKeyColumnMapping();
}
/**
 * Adapts a {@code DataInput} to an {@code InputStream}: returns the argument
 * unchanged when it already is one, otherwise wraps it in an adapter.
 */
public static InputStream from(DataInput dataInput) {
  return dataInput instanceof InputStream
      ? (InputStream) dataInput
      : new DataInputInputStream(dataInput);
}
}
/**
 * Adapts a {@code DataOutput} to an {@code OutputStream}: returns the
 * argument unchanged when it already is one, otherwise wraps it.
 */
public static OutputStream from(DataOutput dataOutput) {
  return dataOutput instanceof OutputStream
      ? (OutputStream) dataOutput
      : new DataOutputOutputStream(dataOutput);
}
}
/**
 * Decomposes the predicate using the sample decomposer built around the key
 * column mapping, pushing down on the key column's name.
 */
@Override
public DecomposedPredicate decomposePredicate(JobConf jobConf, Deserializer deserializer,
    ExprNodeDesc predicate) {
  // Renamed local: the object is a decomposer, not an already-decomposed predicate.
  SampleHBasePredicateDecomposer decomposer = new SampleHBasePredicateDecomposer(keyMapping);
  return decomposer.decomposePredicate(keyMapping.columnName, predicate);
}
}
/**
 * Closes the writer by delegating to {@code close(Reporter)} with a null
 * reporter; the abort flag is not consulted.
 */
@Override
public void close(boolean abort) throws IOException {
  close(null);
}
}
/**
 * Returns the record writer for this output format. Only the job
 * configuration is consulted; the file system, name, and progressable
 * arguments are ignored by the delegate.
 */
@Override
public org.apache.hadoop.mapred.RecordWriter<ImmutableBytesWritable, Object> getRecordWriter(
    FileSystem fileSystem, JobConf jobConf, String name, Progressable progressable)
    throws IOException {
  return getMyRecordWriter(jobConf);
}
@Override public void configureInputJobProperties( TableDesc tableDesc, Map<String, String> jobProperties) { //Input this.configureInputJobProps = true; configureTableJobProperties(tableDesc, jobProperties); }