/**
 * Sets a {@code char}-typed field by name, delegating to the generic set().
 *
 * @param fieldName    name of the field within recordSchema
 * @param recordSchema schema describing this record's fields
 * @param value        the HiveChar value to store
 * @throws HCatException propagated from the underlying set()
 */
public void setChar(String fieldName, HCatSchema recordSchema, HiveChar value) throws HCatException { set(fieldName, recordSchema, value); }
// NOTE(review): getChar's body continues past this chunk boundary and is not visible here.
public HiveChar getChar(String fieldName, HCatSchema recordSchema) throws HCatException {
/**
 * Serializes this message: dbName first, then eventId.
 * The field order is the wire format — it must stay in sync with the
 * matching readFields() (not visible in this chunk; TODO confirm pairing).
 */
@Override public void write(DataOutput dataOutput) throws IOException {
  ReaderWriter.writeDatum(dataOutput, dbName);
  ReaderWriter.writeDatum(dataOutput, eventId);
}
/** Verifies that positional set() on a LazyHCatRecord is rejected as read-only. */
@Test
public void testSet() throws Exception {
  HCatRecord record = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
  boolean threw = false;
  try {
    record.set(3, "Mary had a little lamb");
  } catch (UnsupportedOperationException expected) {
    // Lazy records do not support mutation.
    threw = true;
  }
  Assert.assertTrue(threw);
}
/**
 * Converts this LazyHCatRecord into a {@link DefaultHCatRecord}; required
 * before the record can be serialized via write.
 *
 * @return an HCatRecord that can be serialized
 * @throws HCatException if copying the record's fields fails
 */
public HCatRecord getWritable() throws HCatException {
  DefaultHCatRecord materialized = new DefaultHCatRecord();
  materialized.copy(this);
  return materialized;
}
}
/**
 * Compares two records by their full field contents; mismatch details are
 * appended to {@code debugDetail}.
 *
 * @return the comparison result from compareRecordContents (0 when equal)
 */
public static int compareRecords(HCatRecord first, HCatRecord second,
                                 StringBuilder debugDetail) {
  return compareRecordContents(first.getAll(), second.getAll(), debugDetail);
}
/**
 * Builds an HCatRecord ObjectInspector for the test schema by converting
 * the type info to a struct-based inspector.
 */
private ObjectInspector getObjectInspector() throws Exception {
  StructTypeInfo structInfo = (StructTypeInfo) getTypeInfo();
  return HCatRecordObjectInspectorFactory.getHCatRecordObjectInspector(structInfo);
}
}
// NOTE(review): no @Test annotation visible here, unlike sibling tests — presumably
// JUnit 3 style or the annotation sits outside this chunk; confirm before relying on it.
public void testCompareTo() {
  HCatRecord[] records = getHCatRecords();
  int firstPair = HCatDataCheckUtil.compareRecords(records[0], records[1]);
  int secondPair = HCatDataCheckUtil.compareRecords(records[4], records[5]);
  Assert.assertTrue(firstPair == 0);
  Assert.assertTrue(secondPair == 0);
}
/** Verifies that name-based set() on a LazyHCatRecord is rejected as read-only. */
@Test
public void testSetWithName() throws Exception {
  HCatRecord record = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
  boolean threw = false;
  try {
    record.set("fred", null, "bob");
  } catch (UnsupportedOperationException expected) {
    // Lazy records do not support mutation.
    threw = true;
  }
  Assert.assertTrue(threw);
}
/**
 * Deserializes fields in the exact order emitted by the matching write():
 * dbName, tableName, ptnDesc, importLocation, isDefinitionOnly, eventId.
 * Any reordering here breaks wire compatibility.
 */
@Override public void readFields(DataInput dataInput) throws IOException {
  dbName = (String)ReaderWriter.readDatum(dataInput);
  tableName = (String)ReaderWriter.readDatum(dataInput);
  // Unchecked cast: readDatum returns Object; the writer is trusted to have
  // emitted a Map<String,String> at this position.
  ptnDesc = (Map<String,String>)ReaderWriter.readDatum(dataInput);
  importLocation = (String)ReaderWriter.readDatum(dataInput);
  isDefinitionOnly = (Boolean) ReaderWriter.readDatum(dataInput);
  eventId = (Long) ReaderWriter.readDatum(dataInput);
}
/**
 * Sets a timestamp-typed field by name, delegating to the generic set().
 *
 * @param fieldName    name of the field within recordSchema
 * @param recordSchema schema describing this record's fields
 * @param value        the Timestamp value to store
 * @throws HCatException propagated from the underlying set()
 */
public void setTimestamp(String fieldName, HCatSchema recordSchema, Timestamp value) throws HCatException { set(fieldName, recordSchema, value); }
// NOTE(review): getTimestamp's body continues past this chunk boundary and is not visible here.
public Timestamp getTimestamp(String fieldName, HCatSchema recordSchema) throws HCatException {
/**
 * Serializes this message in a fixed order: dbName, tableName,
 * isReplicatedEvent, eventId. The order is the wire format and must stay
 * in sync with the matching readFields().
 */
@Override public void write(DataOutput dataOutput) throws IOException {
  ReaderWriter.writeDatum(dataOutput, dbName);
  ReaderWriter.writeDatum(dataOutput, tableName);
  ReaderWriter.writeDatum(dataOutput, isReplicatedEvent);
  ReaderWriter.writeDatum(dataOutput, eventId);
}
/**
 * Deserializes fields in the exact order emitted by the matching write():
 * dbName, tableName, ptnDesc, exportLocation, isMetadataOnly, eventId.
 * Any reordering here breaks wire compatibility.
 */
@Override public void readFields(DataInput dataInput) throws IOException {
  dbName = (String)ReaderWriter.readDatum(dataInput);
  tableName = (String)ReaderWriter.readDatum(dataInput);
  // Unchecked cast: readDatum returns Object; the writer is trusted to have
  // emitted a Map<String,String> at this position.
  ptnDesc = (Map<String,String>)ReaderWriter.readDatum(dataInput);
  exportLocation = (String)ReaderWriter.readDatum(dataInput);
  isMetadataOnly = (Boolean) ReaderWriter.readDatum(dataInput);
  eventId = (Long) ReaderWriter.readDatum(dataInput);
}
/**
 * Sets a decimal-typed field by name, delegating to the generic set().
 *
 * @param fieldName    name of the field within recordSchema
 * @param recordSchema schema describing this record's fields
 * @param value        the HiveDecimal value to store
 * @throws HCatException propagated from the underlying set()
 */
public void setDecimal(String fieldName, HCatSchema recordSchema, HiveDecimal value) throws HCatException { set(fieldName, recordSchema, value); }
// NOTE(review): getDecimal's body continues past this chunk boundary and is not visible here.
public HiveDecimal getDecimal(String fieldName, HCatSchema recordSchema) throws HCatException {
/**
 * Serializes this message in a fixed order: dbName, tableName, ptnDesc,
 * exportLocation, isMetadataOnly, eventId. The order is the wire format
 * and must stay in sync with the matching readFields().
 */
@Override public void write(DataOutput dataOutput) throws IOException {
  ReaderWriter.writeDatum(dataOutput, dbName);
  ReaderWriter.writeDatum(dataOutput, tableName);
  ReaderWriter.writeDatum(dataOutput, ptnDesc);
  ReaderWriter.writeDatum(dataOutput, exportLocation);
  ReaderWriter.writeDatum(dataOutput, isMetadataOnly);
  ReaderWriter.writeDatum(dataOutput, eventId);
}
/**
 * Deserializes fields in the exact order emitted by the matching write():
 * dbName, tableName, ptnDesc, isReplicatedEvent, eventId.
 * Any reordering here breaks wire compatibility.
 */
@Override public void readFields(DataInput dataInput) throws IOException {
  dbName = (String)ReaderWriter.readDatum(dataInput);
  tableName = (String)ReaderWriter.readDatum(dataInput);
  // Unchecked cast: readDatum returns Object; the writer is trusted to have
  // emitted a Map<String,String> at this position.
  ptnDesc = (Map<String,String>)ReaderWriter.readDatum(dataInput);
  isReplicatedEvent = (Boolean) ReaderWriter.readDatum(dataInput);
  eventId = (Long) ReaderWriter.readDatum(dataInput);
}
/**
 * Sets a varchar-typed field by name, delegating to the generic set().
 *
 * @param fieldName    name of the field within recordSchema
 * @param recordSchema schema describing this record's fields
 * @param value        the HiveVarchar value to store
 * @throws HCatException propagated from the underlying set()
 */
public void setVarchar(String fieldName, HCatSchema recordSchema, HiveVarchar value) throws HCatException { set(fieldName, recordSchema, value); }
// NOTE(review): getVarchar's body continues past this chunk boundary and is not visible here.
public HiveVarchar getVarchar(String fieldName, HCatSchema recordSchema) throws HCatException {
/**
 * Serializes this message in a fixed order: dbName, tableName, ptnDesc,
 * isReplicatedEvent, eventId. The order is the wire format and must stay
 * in sync with the matching readFields().
 */
@Override public void write(DataOutput dataOutput) throws IOException {
  ReaderWriter.writeDatum(dataOutput, dbName);
  ReaderWriter.writeDatum(dataOutput, tableName);
  ReaderWriter.writeDatum(dataOutput, ptnDesc);
  ReaderWriter.writeDatum(dataOutput, isReplicatedEvent);
  ReaderWriter.writeDatum(dataOutput, eventId);
}
/**
 * Deserializes fields in the exact order emitted by the matching write():
 * dbName, tableName, isReplicatedEvent, eventId.
 * Any reordering here breaks wire compatibility.
 */
@Override public void readFields(DataInput dataInput) throws IOException {
  dbName = (String)ReaderWriter.readDatum(dataInput);
  tableName = (String)ReaderWriter.readDatum(dataInput);
  isReplicatedEvent = (Boolean) ReaderWriter.readDatum(dataInput);
  eventId = (Long) ReaderWriter.readDatum(dataInput);
}
/**
 * Sets a date-typed field by name, delegating to the generic set().
 *
 * <p>Note that the proper way to construct a java.sql.Date for use with this
 * object is Date.valueOf("1999-12-31").</p>
 *
 * @param fieldName    name of the field within recordSchema
 * @param recordSchema schema describing this record's fields
 * @param value        the Date value to store
 * @throws HCatException propagated from the underlying set()
 */
public void setDate(String fieldName, HCatSchema recordSchema, Date value) throws HCatException { set(fieldName, recordSchema, value); }
// NOTE(review): getDate's body continues past this chunk boundary and is not visible here.
public Date getDate(String fieldName, HCatSchema recordSchema) throws HCatException {