@Override
public Object getWritableConstantValue() {
  if (value == null) {
    return null;
  }
  return new TimestampWritableV2(value);
}

LazyBinaryTimestamp(WritableTimestampObjectInspector oi) {
  super(oi);
  data = new TimestampWritableV2();
}

public LazyTimestamp(LazyTimestampObjectInspector oi) {
  super(oi);
  data = new TimestampWritableV2();
}

public Object create(Timestamp t) {
  return new TimestampWritableV2(t);
}

LazyBinaryTimestamp(LazyBinaryTimestamp copy) {
  super(copy);
  data = new TimestampWritableV2(copy.data);
}

@Override
public void copyToNewInstance(Object newInstance) throws UDFArgumentException {
  super.copyToNewInstance(newInstance);
  // Need to preserve currentTimestamp
  GenericUDFCurrentTimestamp other = (GenericUDFCurrentTimestamp) newInstance;
  if (this.currentTimestamp != null) {
    other.currentTimestamp = new TimestampWritableV2(this.currentTimestamp);
  }
}

public UDFDateFloor(String granularity) {
  this.granularity = QueryGranularity.fromString(granularity);
  // Preallocate reusable result writables for TIMESTAMP and TIMESTAMP WITH LOCAL TIME ZONE outputs.
  this.resultTS = new TimestampWritableV2();
  this.resultTSLTZ = new TimestampLocalTZWritable();
}

@Override
public void writeTimestamp(Timestamp v) throws IOException {
  beginPrimitive();
  // Lazily allocate a single writable and reuse it across calls.
  if (timestampWritable == null) {
    timestampWritable = new TimestampWritableV2();
  }
  timestampWritable.set(v);
  LazyTimestamp.writeUTF8(output, timestampWritable);
  finishPrimitive();
}

@Override
public void writeTimestamp(Timestamp v) throws IOException {
  beginElement();
  // Lazily allocate a single writable, then serialize it directly to the output byte stream.
  if (timestampWritable == null) {
    timestampWritable = new TimestampWritableV2();
  }
  timestampWritable.set(v);
  timestampWritable.writeToByteStream(output);
  finishElement();
}

private void compareToUDFWeekOfYearLong(Timestamp t, int y) {
  UDFWeekOfYear udf = new UDFWeekOfYear();
  TimestampWritableV2 tsw = new TimestampWritableV2(
      org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
  IntWritable res = udf.evaluate(tsw);
  Assert.assertEquals(res.get(), y);
}

private void runAndVerifyTs(String str, String expResult, GenericUDF udf) throws HiveException {
  DeferredObject valueObj0 = new DeferredJavaObject(
      str != null ? new TimestampWritableV2(Timestamp.valueOf(str)) : null);
  DeferredObject[] args = { valueObj0 };
  Text output = (Text) udf.evaluate(args);
  assertEquals("last_day() test ", expResult, output != null ? output.toString() : null);
}

private void runAndVerify(Timestamp ts, int months, Text dateFormat, String expResult,
    GenericUDF udf) throws HiveException {
  DeferredObject valueObj0 = new DeferredJavaObject(new TimestampWritableV2(ts));
  DeferredObject valueObj1 = new DeferredJavaObject(new IntWritable(months));
  DeferredObject valueObj2 = new DeferredJavaObject(dateFormat);
  DeferredObject[] args = { valueObj0, valueObj1, valueObj2 };
  Text output = (Text) udf.evaluate(args);
  assertEquals("add_months() test for timestamp", expResult,
      output != null ? output.toString() : null);
}

// Convert a days-since-epoch value into a TimestampWritableV2 at the start of that day.
private TimestampWritableV2 toTimestampWritable(long daysSinceEpoch) {
  return new TimestampWritableV2(
      org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(
          DateWritableV2.daysToMillis((int) daysSinceEpoch)));
}

private void runAndVerifyTs(String str, Text fmtText, String expResult, GenericUDF udf)
    throws HiveException {
  DeferredObject valueObj0 = new DeferredJavaObject(
      str != null ? new TimestampWritableV2(Timestamp.valueOf(str)) : null);
  DeferredObject valueObj1 = new DeferredJavaObject(fmtText);
  DeferredObject[] args = { valueObj0, valueObj1 };
  Text output = (Text) udf.evaluate(args);
  assertEquals("date_format() test ", expResult, output != null ? output.toString() : null);
}

private void compareToUDFMinuteLong(Timestamp t, int y) throws HiveException {
  UDFMinute udf = new UDFMinute();
  udf.initialize(new ObjectInspector[] {
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector });
  TimestampWritableV2 tsw = new TimestampWritableV2(
      org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
  IntWritable res = (IntWritable) udf.evaluate(
      new GenericUDF.DeferredObject[] { new GenericUDF.DeferredJavaObject(tsw) });
  Assert.assertEquals(res.get(), y);
}

private void compareToUDFYearLong(Timestamp t, int y) throws HiveException {
  UDFYear udf = new UDFYear();
  udf.initialize(new ObjectInspector[] {
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector });
  TimestampWritableV2 tsw = new TimestampWritableV2(
      org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
  IntWritable res = (IntWritable) udf.evaluate(
      new GenericUDF.DeferredObject[] { new GenericUDF.DeferredJavaObject(tsw) });
  Assert.assertEquals(res.get(), y);
}

private void compareToUDFMonthLong(Timestamp t, int y) throws HiveException {
  UDFMonth udf = new UDFMonth();
  udf.initialize(new ObjectInspector[] {
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector });
  TimestampWritableV2 tsw = new TimestampWritableV2(
      org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
  IntWritable res = (IntWritable) udf.evaluate(
      new GenericUDF.DeferredObject[] { new GenericUDF.DeferredJavaObject(tsw) });
  Assert.assertEquals(res.get(), y);
}

private void compareToUDFDayOfMonthLong(Timestamp t, int y) throws HiveException {
  UDFDayOfMonth udf = new UDFDayOfMonth();
  udf.initialize(new ObjectInspector[] {
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector });
  TimestampWritableV2 tsw = new TimestampWritableV2(
      org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
  IntWritable res = (IntWritable) udf.evaluate(
      new GenericUDF.DeferredObject[] { new GenericUDF.DeferredJavaObject(tsw) });
  Assert.assertEquals(res.get(), y);
}

private void compareToUDFHourLong(Timestamp t, int y) throws HiveException {
  UDFHour udf = new UDFHour();
  udf.initialize(new ObjectInspector[] {
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector });
  TimestampWritableV2 tsw = new TimestampWritableV2(
      org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
  IntWritable res = (IntWritable) udf.evaluate(
      new GenericUDF.DeferredObject[] { new GenericUDF.DeferredJavaObject(tsw) });
  Assert.assertEquals(res.get(), y);
}

private void compareToUDFSecondLong(Timestamp t, int y) throws HiveException {
  UDFSecond udf = new UDFSecond();
  udf.initialize(new ObjectInspector[] {
      PrimitiveObjectInspectorFactory.writableTimestampObjectInspector });
  TimestampWritableV2 tsw = new TimestampWritableV2(
      org.apache.hadoop.hive.common.type.Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
  IntWritable res = (IntWritable) udf.evaluate(
      new GenericUDF.DeferredObject[] { new GenericUDF.DeferredJavaObject(tsw) });
  Assert.assertEquals(res.get(), y);
}