private Date readDateValue(String dateStr) { // try either yyyy-mm-dd, or integer representing days since epoch try { DateWritable writableVal = new DateWritable(java.sql.Date.valueOf(dateStr)); return new Date(writableVal.getDays()); } catch (IllegalArgumentException err) { // Fallback to integer parsing LOG.debug("Reading date value as days since epoch: " + dateStr); return new Date(Long.parseLong(dateStr)); } } }
/**
 * Creates a DateWritable holding the given day count.
 *
 * @param d days since the Unix epoch
 */
public DateWritable(int d) {
  this.set(d);
}
/**
 * Sets this writable from the year/month/day of the given date, interpreted in
 * the local time zone. A null date resets the stored value to day zero.
 *
 * @param d Date value, may be null
 */
public void set(Date d) {
  if (d != null) {
    set(dateToDays(d));
  } else {
    daysSinceEpoch = 0;
  }
}
@Override
protected long getDateField(long days) {
  // Load the (truncated) day count into the shared writable, then read it
  // back as seconds.
  final int dayCount = (int) days;
  dateWritable.set(dayCount);
  return dateWritable.getTimeInSeconds();
}
/**
 * Renders a metastore Date as its string form via DateWritable.
 *
 * @param val metastore date, may be null
 * @return the formatted date, or an empty string when {@code val} is null
 */
private static String convertToString(org.apache.hadoop.hive.metastore.api.Date val) {
  return val == null
      ? ""
      : new DateWritable((int) val.getDaysSinceEpoch()).toString();
}
public DateScalarSubtractTimestampColumn(long value, int colNum, int outputColumn) { this.colNum = colNum; // Scalar input #1 is type date (days). For the math we convert it to a timestamp. this.value = new Timestamp(0); this.value.setTime(DateWritable.daysToMillis((int) value)); this.outputColumn = outputColumn; }
// NOTE(review): fragment of a switch returning a zero/empty writable per type;
// the switch header and preceding case labels are outside this view.
return new LongWritable(0);
case TIMESTAMP:
  return new TimestampWritable(new Timestamp(0));
case DATE:
  return new DateWritable(new Date(0));
case FLOAT:
  return new FloatWritable(0);
// NOTE(review): the next return is unreachable as written — a case label
// (presumably DOUBLE) appears to have been lost in extraction; confirm against
// the original file.
return new DoubleWritable(0);
case BINARY:
  return new BytesWritable(ArrayUtils.EMPTY_BYTE_ARRAY);
case STRING:
  // Empty text value.
  return new Text(ArrayUtils.EMPTY_BYTE_ARRAY);
case VARCHAR:
  return new HiveVarcharWritable(new HiveVarchar(StringUtils.EMPTY, -1));
case CHAR:
  return new HiveCharWritable(new HiveChar(StringUtils.EMPTY, -1));
case DECIMAL:
  return new HiveDecimalWritable();
case INTERVAL_YEAR_MONTH:
  return new HiveIntervalYearMonthWritable();
// NOTE(review): fragment of a switch converting a primitive value to Text via
// its object inspector. Several case labels and statement prefixes were lost
// in extraction (see the dangling ".getStrippedValue());" and ".toString());"
// below) — this span is not compilable as-is; restore from the original file.
return null;
Text t = new Text();
return null;
case BOOLEAN:
  t.set(((BooleanObjectInspector) inputOI).get(input) ? trueBytes : falseBytes);
  return t;
  .getStrippedValue());
  .toString());
  // Date renders through its writable's string form.
  t.set(((DateObjectInspector) inputOI).getPrimitiveWritableObject(input).toString());
  return t;
  .getPrimitiveWritableObject(input).toString());
  return t;
case BINARY:
  if (binaryOI.preferWritable()) {
    BytesWritable bytes = binaryOI.getPrimitiveWritableObject(input);
    t.set(bytes.getBytes(), 0, bytes.getLength());
    t.set(((HiveDecimalObjectInspector) inputOI).getPrimitiveWritableObject(input).toString());
    return t;
default:
// NOTE(review): fragment extracting a Date from an argument whose category is
// one of STRING / TIMESTAMP / DATE. The enclosing method, the try/switch
// closers, and the STRING case's break were lost in extraction — incomplete
// as shown; do not modify without the original file.
return null;
Date date = new Date(0);
switch (inputType) {
  case STRING:
    String dateString = converter.convert(argument.get()).toString();
    try {
      date.setTime(formatter.parse(dateString).getTime());
    } catch (ParseException e) {
      // Unparseable string yields null.
      return null;
  case TIMESTAMP:
    Timestamp ts = ((TimestampWritable) converter.convert(argument.get()))
        .getTimestamp();
    date.setTime(ts.getTime());
    break;
  case DATE:
    DateWritable dw = (DateWritable) converter.convert(argument.get());
    date = dw.get();
    break;
  default:
// NOTE(review): fragment of a switch feeding a typed parameter value into a
// bloom filter (bf). The switch header and the case label between the DECIMAL
// branch and the DateWritable branch (presumably "case DATE:") are missing
// from this view.
HiveDecimalWritable vDecimal = ((HiveDecimalObjectInspector)inputOI).
    getPrimitiveWritableObject(parameters[0]);
// Serialize the decimal into the reusable scratch buffer; toBytes returns the
// offset of the first meaningful byte.
int startIdx = vDecimal.toBytes(scratchBuffer);
bf.addBytes(scratchBuffer, startIdx, scratchBuffer.length - startIdx);
break;
// Dates hash by their day count.
DateWritable vDate = ((DateObjectInspector)inputOI).
    getPrimitiveWritableObject(parameters[0]);
bf.addLong(vDate.getDays());
break;
case TIMESTAMP:
  Timestamp vTimeStamp = ((TimestampObjectInspector)inputOI).
      getPrimitiveJavaObject(parameters[0]);
  bf.addLong(vTimeStamp.getTime());
  break;
case CHAR:
  // CHAR is hashed without trailing padding.
  Text vChar = ((HiveCharObjectInspector)inputOI).
      getPrimitiveWritableObject(parameters[0]).getStrippedValue();
  bf.addBytes(vChar.getBytes(), 0, vChar.getLength());
  break;
case VARCHAR:
  Text vVarChar = ((HiveVarcharObjectInspector)inputOI).
      getPrimitiveWritableObject(parameters[0]).getTextValue();
  bf.addBytes(vVarChar.getBytes(), 0, vVarChar.getLength());
  break;
case STRING:
@Override public DateWritable getPrimitiveWritableObject(Object o) { if (o == null) { return null; } final NullableDateHolder h = (NullableDateHolder) o; org.joda.time.LocalDate localDate = new org.joda.time.LocalDate(h.value, org.joda.time.DateTimeZone.UTC); // Use "toDate()" to get java.util.Date object with exactly the same year the same year, month and day as Joda date. // See more in Javadoc for "LocalDate#toDate()" return new DateWritable(new java.sql.Date(localDate.toDate().getTime())); }
/**
 * Materializes this writable as a Date.
 *
 * @param doesTimeMatter forwarded to daysToMillis to control the
 *     day-to-millisecond conversion
 * @return the Date corresponding to the stored day count
 */
public Date get(boolean doesTimeMatter) {
  final long millis = daysToMillis(daysSinceEpoch, doesTimeMatter);
  return new Date(millis);
}
// NOTE(review): fragment of a date-arithmetic UDF. The switch header, the
// STRING case label, and the closing braces for the STRING branch were lost
// in extraction — incomplete as shown.
String dateString = dateConverter.convert(arguments[0].get()).toString();
if (dateParser.parseDate(dateString, dateVal)) {
  output.set(dateVal);
} else {
  // Unparseable date string yields null.
  return null;
case TIMESTAMP:
  // Timestamps are truncated to whole days.
  Timestamp ts = ((TimestampWritable) dateConverter.convert(arguments[0].get()))
      .getTimestamp();
  output.set(DateWritable.millisToDays(ts.getTime()));
  break;
case DATE:
  DateWritable dw = (DateWritable) dateConverter.convert(arguments[0].get());
  output.set(dw.getDays());
  break;
default:
// Apply the signed day delta to the extracted base date.
int newDays = output.getDays() + (signModifier * toBeAdded);
output.set(newDays);
return output;
// NOTE(review): fragment of an object-inspector-to-column-vector assignment
// chain. Braces are mismatched (an "} else if" follows an already-closed
// else) and whole branches (varchar OI setup, the date branch's label) were
// lost in extraction — incomplete as shown; restore from the original file.
Text t;
if (value instanceof String) {
  t = new Text((String) value);
} else {
  t = ((WritableStringObjectInspector) outputOI).getPrimitiveWritableObject(value);
  bv.setVal(i, t.getBytes(), 0, t.getLength());
} else if (outputOI instanceof WritableHiveCharObjectInspector) {
  WritableHiveCharObjectInspector writableHiveCharObjectOI = (WritableHiveCharObjectInspector) outputOI;
  hiveCharWritable = writableHiveCharObjectOI.getPrimitiveWritableObject(value);
  Text t = hiveCharWritable.getTextValue();
  hiveVarcharWritable = writableHiveVarcharObjectOI.getPrimitiveWritableObject(value);
  Text t = hiveVarcharWritable.getTextValue();
  // Char/varchar text is truncated to the type's max length on write.
  StringExpr.truncate(bv, i, t.getBytes(), 0, t.getLength(), maxLength);
  // Dates are stored in long vectors as days since epoch.
  ts = ((WritableDateObjectInspector) outputOI).getPrimitiveJavaObject(value);
  long l = DateWritable.dateToDays(ts);
  lv.vector[i] = l;
} else if (outputOI instanceof WritableBooleanObjectInspector) {
// NOTE(review): fragment of an else-if chain unwrapping Hadoop writables into
// plain Java values; the chain starts before and continues after this view.
return writable.toString();
} else if (writable instanceof BytesWritable) {
  return ((BytesWritable) writable).getBytes();
} else if (writable instanceof ByteWritable) {
  return ((ByteWritable) writable).get();
} else if (writable instanceof DateWritable) {
  // Dates are exposed as a day count since the epoch.
  return ((DateWritable) writable).get().toLocalDate().toEpochDay();
} else if (writable instanceof org.apache.hadoop.hive.serde2.io.ShortWritable) {
  return ((org.apache.hadoop.hive.serde2.io.ShortWritable) writable).get();
} else if (writable instanceof HiveBaseCharWritable) {
  return ((HiveBaseCharWritable) writable).getTextValue().toString();
} else if (writable instanceof TimestampWritable) {
  return toEpochTimestamp(((TimestampWritable) writable).getTimestamp(), schema);
} else if (writable instanceof org.apache.hadoop.hive.serde2.io.DoubleWritable) {
  return ((org.apache.hadoop.hive.serde2.io.DoubleWritable) writable).get();
} else if (writable instanceof HiveDecimalWritable) {
  return ((HiveDecimalWritable) writable).getHiveDecimal();
} else if (writable instanceof NullWritable) {
  return null;
/**
 * Evaluates one string cell: parses it as a date and writes the day offset
 * relative to {@code baseDate} into the output vector. Unparseable input marks
 * the row null.
 */
protected void evaluateString(ColumnVector columnVector, LongColumnVector output, int i) {
  final BytesColumnVector bytesVector = (BytesColumnVector) columnVector;
  text.set(bytesVector.vector[i], bytesVector.start[i], bytesVector.length[i]);
  try {
    date.setTime(formatter.parse(text.toString()).getTime());
    output.vector[i] = DateWritable.dateToDays(date) - baseDate;
  } catch (ParseException e) {
    // Bad date string: flag null; 1 is just a placeholder value.
    output.vector[i] = 1;
    output.isNull[i] = true;
  }
}
@Override
/**
 * Converts a Hadoop Writable into the corresponding Tajo Datum.
 *
 * @param value the writable to convert; null maps to NullDatum
 * @return the converted datum
 * @throws UnsupportedDataTypeException if the writable class has no Tajo mapping
 */
public static Datum convertWritable2Datum(Writable value) throws UnsupportedDataTypeException {
  if (value == null) {
    return NullDatum.get();
  }
  final DataType tajoType = convertWritableToTajoType(value.getClass());
  switch (tajoType.getType()) {
    case INT1:
      // One-byte values are widened into Int2Datum.
      return new Int2Datum(((ByteWritable) value).get());
    case INT2:
      return new Int2Datum(((ShortWritable) value).get());
    case INT4:
      return new Int4Datum(((IntWritable) value).get());
    case INT8:
      return new Int8Datum(((LongWritable) value).get());
    case FLOAT4:
      return new Float4Datum(((FloatWritable) value).get());
    case FLOAT8:
      return new Float8Datum(((DoubleWritable) value).get());
    case DATE:
      // Hive stores days since the Unix epoch; Tajo dates are Julian-based,
      // so shift by UNIX_EPOCH_JDATE.
      return new DateDatum(((DateWritable) value).getDays() + DateTimeConstants.UNIX_EPOCH_JDATE);
    case TIMESTAMP:
      return new TimestampDatum(DateTimeUtil.javaTimeToJulianTime(
          ((TimestampWritable) value).getTimestamp().getTime()));
    case CHAR:
      return new CharDatum(value.toString());
    case TEXT:
      return new TextDatum(value.toString());
    case VARBINARY:
      return new BlobDatum(((BytesWritable) value).getBytes());
  }
  throw new TajoRuntimeException(new UnsupportedDataTypeException(value.getClass().getTypeName()));
}
}
// NOTE(review): fragment of a writable-to-SQL-value conversion; the branch
// conditions (instanceof checks or case labels) between these assignments and
// the timestamp local's declaration were lost in extraction — incomplete as
// shown.
actualValue = new SqlVarbinary(((BytesWritable) actualValue).copyBytes());
actualValue = new SqlDate(((DateWritable) actualValue).getDays());
// CHAR comparison uses the blank-padded form.
actualValue = ((HiveCharWritable) actualValue).getPaddedValue().toString();
HiveDecimalWritable writable = (HiveDecimalWritable) actualValue;
// Rescale the unscaled decimal to the target type's scale before wrapping.
BigInteger rescaledValue = rescale(writable.getHiveDecimal().unscaledValue(),
    writable.getScale(), decimalType.getScale());
actualValue = new SqlDecimal(rescaledValue, decimalType.getPrecision(), decimalType.getScale());
// Combine whole seconds and the millisecond part of the nanos field.
actualValue = sqlTimestampOf((timestamp.getSeconds() * 1000) + (timestamp.getNanos() / 1000000L), SESSION);
// NOTE(review): fragment of a switch writing a deserialized object into the
// projected column of a VectorizedRowBatch; the switch header, FLOAT branch
// body, and several case labels/conditions were lost in extraction.
case TIMESTAMP:
  ((TimestampColumnVector) batch.cols[projectionColumnNum]).set(
      batchIndex, ((TimestampWritable) object).getTimestamp());
  break;
case DATE:
  // Dates land in long vectors as days since epoch.
  ((LongColumnVector) batch.cols[projectionColumnNum]).vector[batchIndex] =
      ((DateWritable) object).getDays();
  break;
case FLOAT:
  batchIndex, bw.getBytes(), 0, bw.getLength());
  batchIndex, tw.getBytes(), 0, tw.getLength());
  hiveVarchar = (HiveVarchar) object;
} else {
  hiveVarchar = ((HiveVarcharWritable) object).getHiveVarchar();
  hiveChar = (HiveChar) object;
} else {
  hiveChar = ((HiveCharWritable) object).getHiveChar();