/**
 * Renders a metastore API Date as its string form via DateWritableV2
 * (yyyy-mm-dd), returning the empty string for a null input.
 */
private static String convertToString(org.apache.hadoop.hive.metastore.api.Date val) {
  if (val == null) {
    return "";
  }
  // DateWritableV2.toString() produces the canonical date text.
  return new DateWritableV2((int) val.getDaysSinceEpoch()).toString();
}
// Enum constant (enclosing enum not visible in this chunk): casts a long day
// count to the date object produced by DateWritableV2.get().
DAYS {@Override Object cast(long longValue) { return (new DateWritableV2((int)longValue)).get();} };
/**
 * Computes the difference in whole days between two dates (date - date2).
 * Returns null when either argument is null; otherwise reuses the shared
 * {@code result} writable.
 */
private IntWritable evaluate(Date date, Date date2) {
  if (date == null || date2 == null) {
    return null;
  }
  int dayDiff = DateWritableV2.dateToDays(date) - DateWritableV2.dateToDays(date2);
  result.set(dayDiff);
  return result;
}
}
// Converts a day-since-epoch count to epoch seconds, reusing the scratch
// dateWritable field declared elsewhere in this class.
@Override protected long getDateField(long days) { dateWritable.set((int) days); return dateWritable.getTimeInSeconds(); }
/**
 * Unwraps a DateWritableV2 into its java-level Date, or null for null input.
 */
public Date getPrimitiveJavaObject(Object o) {
  if (o == null) {
    return null;
  }
  return ((DateWritableV2) o).get();
}
// Constructor: the first super argument is a placeholder (-1) because this
// expression has no real field index — only the input column and the output
// column matter. A scratch DateWritableV2 is allocated once and reused.
public VectorUDFUnixTimeStampDate(int colNum, int outputColumnNum) { /* not a real field */ super(-1, colNum, outputColumnNum); dateWritable = new DateWritableV2(); }
@Override protected void update(Object p, PrimitiveObjectInspector inputOI) { // DateWritableV2 is mutable, DateStatsAgg needs its own copy DateWritableV2 v = new DateWritableV2((DateWritableV2) inputOI.getPrimitiveWritableObject(p)); //Update min counter if new value is less than min seen so far if (min == null || v.compareTo(min) < 0) { min = v; } //Update max counter if new value is greater than max seen so far if (max == null || v.compareTo(max) > 0) { max = v; } // Add value to NumDistinctValue Estimator numDV.addToEstimator(v.getDays()); }
// Constructor: the scalar operand arrives encoded as days-since-epoch in a
// long; it is materialized into a Date via millis conversion up front so the
// per-row path does not repeat the conversion.
public DateScalarSubtractIntervalYearMonthColumn(long value, int colNum, int outputColumnNum) { super(outputColumnNum); this.value = new Date(DateWritableV2.daysToMillis((int) value)); this.colNum = colNum; }
@Override protected void updateMin(Object minValue, DateObjectInspector minFieldOI) { if ((minValue != null) && (min == null || min.compareTo(minFieldOI.getPrimitiveWritableObject(minValue)) > 0)) { // DateWritableV2 is mutable, DateStatsAgg needs its own copy min = new DateWritableV2(minFieldOI.getPrimitiveWritableObject(minValue)); } }
// Serializes a date (given as days since epoch) in UTF-8 text form, bracketed
// by the beginPrimitive/finishPrimitive framing this writer uses for all
// primitives. The scratch DateWritableV2 is lazily allocated and reused.
@Override public void writeDate(int dateAsDays) throws IOException { beginPrimitive(); if (dateWritable == null) { dateWritable = new DateWritableV2(); } dateWritable.set(dateAsDays); LazyDate.writeUTF8(output, dateWritable); finishPrimitive(); }
// Parses a date literal, accepting either yyyy-mm-dd text or a bare integer.
// NOTE(review): both branches feed a days-since-epoch count into the Date
// constructor (getDays() and the parsed long) — presumably Date here is the
// metastore type whose ctor takes days, not millis; confirm against imports.
private Date readDateValue(String dateStr) { // try either yyyy-mm-dd, or integer representing days since epoch try { DateWritableV2 writableVal = new DateWritableV2(org.apache.hadoop.hive.common.type.Date.valueOf(dateStr)); return new Date(writableVal.getDays()); } catch (IllegalArgumentException err) { // Fallback to integer parsing LOG.debug("Reading date value as days since epoch: {}", dateStr); return new Date(Long.parseLong(dateStr)); } } }
public long addMonthsToDays(long days, int months) { long millis = DateWritableV2.daysToMillis((int) days); millis = addMonthsToMillis(millis, months); // Convert millis result back to days return DateWritableV2.millisToDays(millis); }
/**
 * Writes a Date in SQL date format to the output stream as UTF-8 bytes.
 *
 * @param out the output stream to write to
 * @param d the date to write
 * @throws IOException if writing to the stream fails
 */
public static void writeUTF8(OutputStream out, DateWritableV2 d) throws IOException {
  // Text.encode handles the UTF-8 conversion; only limit() bytes are valid.
  ByteBuffer encoded = Text.encode(d.toString());
  out.write(encoded.array(), 0, encoded.limit());
}
// NOTE(review): this line is a garbled extraction fragment — statements follow
// an unconditional `return null;`, and `case DATE:`/`default:` labels appear
// with no enclosing `switch` in view. Left byte-identical; the real method
// (a date-add style UDF dispatching on the argument's primitive category)
// must be reviewed in its original file.
String dateString = dateConverter.convert(arguments[0].get()).toString(); if (dateParser.parseDate(dateString, dateVal)) { output.set(dateVal); } else { return null; Timestamp ts = ((TimestampWritableV2) dateConverter.convert(arguments[0].get())) .getTimestamp(); output.set(DateWritableV2.millisToDays(ts.toEpochMilli())); break; case DATE: DateWritableV2 dw = (DateWritableV2) dateConverter.convert(arguments[0].get()); output.set(dw.getDays()); break; default: int newDays = output.getDays() + (signModifier * toBeAdded); output.set(newDays); return output;
/**
 * Copies the given date into an existing DateWritableV2 holder.
 * Returns the updated holder, or null when the date itself is null.
 */
public Object set(Object o, Date d) {
  if (d != null) {
    ((DateWritableV2) o).set(d);
    return o;
  }
  return null;
}
// Serializes a date into the LazyBinary stream as a variable-length int of
// its days-since-epoch value.
public static void writeDateToByteStream(RandomAccessOutput byteStream, DateWritableV2 date) { LazyBinaryUtils.writeVInt(byteStream, date.getDays()); }
protected long evaluateTimestamp(ColumnVector columnVector, int index, long numDays) { TimestampColumnVector tcv = (TimestampColumnVector) columnVector; // Convert to date value (in days) long days = DateWritableV2.millisToDays(tcv.getTime(index)); if (isPositive) { days += numDays; } else { days -= numDays; } return days; }
// NOTE(review): garbled extraction fragment — code follows an unconditional
// `return null;` and `case TIMESTAMPLOCALTZ:`/`case DATE:`/`default:` labels
// have no enclosing `switch` in view. Left byte-identical; review the UDF's
// original file (a to-date style conversion dispatching on input category).
String dateString = textConverter.convert(arguments[0].get()).toString(); if (dateParser.parseDate(dateString, date)) { output.set(date); } else { return null; Timestamp ts = ((TimestampWritableV2) timestampConverter.convert(arguments[0].get())) .getTimestamp(); output.set(DateWritableV2.millisToDays(ts.toEpochMilli())); break; case TIMESTAMPLOCALTZ: case DATE: DateWritableV2 dw = (DateWritableV2) dateWritableConverter.convert(arguments[0].get()); output.set(dw); break; default:
/**
 * Two DateWritableV2 instances are equal exactly when compareTo reports 0;
 * any non-DateWritableV2 (including null) is unequal.
 */
@Override
public boolean equals(Object o) {
  return (o instanceof DateWritableV2) && compareTo((DateWritableV2) o) == 0;
}