// Maps a Hive PrimitiveTypeInfo onto an engine Type: BOOLEAN/DATE pass through,
// VARCHAR/CHAR carry their length parameter, DECIMAL carries precision/scale,
// and unknown categories yield null.
// NOTE(review): this line looks garbled by extraction — "return createUnboundedVarcharType();"
// directly after the BOOLEAN case is unreachable, so at least one case label
// (presumably "case STRING:") was lost, and the method's closing braces are missing.
// Recover the original from version control before editing.
public static Type getPrimitiveType(PrimitiveTypeInfo typeInfo) switch (typeInfo.getPrimitiveCategory()) { case BOOLEAN: return BOOLEAN; return createUnboundedVarcharType(); case VARCHAR: return createVarcharType(((VarcharTypeInfo) typeInfo).getLength()); case CHAR: return createCharType(((CharTypeInfo) typeInfo).getLength()); case DATE: return DATE; case DECIMAL: DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo; return createDecimalType(decimalTypeInfo.precision(), decimalTypeInfo.scale()); default: return null;
public static String getTypeName(ASTNode node) throws SemanticException { int token = node.getType(); String typeName; // datetime type isn't currently supported if (token == HiveParser.TOK_DATETIME) { throw new SemanticException(ErrorMsg.UNSUPPORTED_TYPE.getMsg()); } switch (token) { case HiveParser.TOK_CHAR: CharTypeInfo charTypeInfo = ParseUtils.getCharTypeInfo(node); typeName = charTypeInfo.getQualifiedName(); break; case HiveParser.TOK_VARCHAR: VarcharTypeInfo varcharTypeInfo = ParseUtils.getVarcharTypeInfo(node); typeName = varcharTypeInfo.getQualifiedName(); break; case HiveParser.TOK_DECIMAL: DecimalTypeInfo decTypeInfo = ParseUtils.getDecimalTypeTypeInfo(node); typeName = decTypeInfo.getQualifiedName(); break; default: typeName = TokenToTypeName.get(token); } return typeName; }
// Fragment of a TypeInfo-string parser: for each parameterized category it
// validates the parameter count (returning null on mismatch) and builds the
// corresponding CharTypeInfo / VarcharTypeInfo / DecimalTypeInfo from
// parts.typeParams.
// NOTE(review): garbled by extraction — the opening of the CHAR case and the
// closing braces of each "if" are missing, and the fragment cuts off at
// TIMESTAMPLOCALTZ. Not a complete definition; restore from the original file.
return null; return new CharTypeInfo(Integer.valueOf(parts.typeParams[0])); case VARCHAR: if (parts.typeParams.length != 1) { return null; return new VarcharTypeInfo(Integer.valueOf(parts.typeParams[0])); case DECIMAL: if (parts.typeParams.length != 2) { return null; return new DecimalTypeInfo(Integer.valueOf(parts.typeParams[0]), Integer.valueOf(parts.typeParams[1])); case TIMESTAMPLOCALTZ:
// Converts a raw deserialized value into the Java object matching a Hive
// primitive category: TIMESTAMP via epoch millis, CHAR/VARCHAR wrapped with
// their declared length, STRING via toString(), unknown categories rejected
// with SerDeException.
// NOTE(review): garbled by extraction — the case label in front of
// "((Number) value).doubleValue()" (presumably DOUBLE) was lost, the default
// label before the throw is missing, and the closing braces are gone.
// Recover the original from version control before editing.
private static Object convertAsPrimitive(Object value, PrimitiveTypeInfo typeInfo) throws SerDeException { switch (typeInfo.getPrimitiveCategory()) { case TIMESTAMP: return Timestamp.ofEpochMilli(deserializeToMillis(value)); return ((Number) value).doubleValue(); case CHAR: return new HiveChar(value.toString(), ((CharTypeInfo) typeInfo).getLength()); case VARCHAR: return new HiveVarchar(value.toString(), ((VarcharTypeInfo) typeInfo).getLength()); case STRING: return value.toString(); throw new SerDeException("Unknown type: " + typeInfo.getPrimitiveCategory());
/**
 * Returns the declared length of a CHAR or VARCHAR Hive type.
 *
 * @param hiveType the Hive type to inspect
 * @return the length parameter for CHAR/VARCHAR types, or -1 for every
 *         other (or non-primitive) type
 */
private static int getVarcharLength(TypeInfo hiveType) {
  // Only primitive types can carry a char-length parameter.
  if (!(hiveType instanceof PrimitiveTypeInfo)) {
    return -1;
  }
  PrimitiveTypeInfo primitiveType = (PrimitiveTypeInfo) hiveType;
  switch (primitiveType.getPrimitiveCategory()) {
    case CHAR:
      return ((CharTypeInfo) primitiveType).getLength();
    case VARCHAR:
      return ((VarcharTypeInfo) primitiveType).getLength();
    default:
      // Length is meaningless for other categories.
      return -1;
  }
}
}
// Test-verification fragment: compares a deserialized field against its
// expected value per primitive category, failing the test with a descriptive
// message on mismatch (char, varchar, decimal with precision/scale shown).
// NOTE(review): heavily garbled by extraction — "case BOOLEAN:" is immediately
// followed by char-comparison code, the varchar and decimal branches are fused
// together, and several closing braces are missing. Not safe to edit as-is;
// restore the original test method from version control.
switch (primitiveTypeInfo.getPrimitiveCategory()) { case BOOLEAN: String string = text.toString(); HiveChar hiveChar = new HiveChar(string, ((CharTypeInfo) primitiveTypeInfo).getLength()); HiveChar expected = ((HiveCharWritable) object).getHiveChar(); if (!hiveChar.equals(expected)) { TestCase.fail("Char field mismatch (expected '" + expected + "' found '" + hiveChar + "')"); String string = text.toString(); HiveVarchar hiveVarchar = new HiveVarchar(string, ((VarcharTypeInfo) primitiveTypeInfo).getLength()); if (!value.equals(expected)) { DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo; int precision = decimalTypeInfo.getPrecision(); int scale = decimalTypeInfo.getScale(); TestCase.fail("Decimal field mismatch (expected " + expected.toString() + " found " + value.toString() + ") precision " + precision + ", scale " + scale); throw new Error("Unknown primitive category " + primitiveTypeInfo.getPrimitiveCategory());
/**
 * Extracts the HiveChar value from a lazy object, enforcing this
 * inspector's declared CHAR length.
 *
 * @param o the lazy object holding the value (may be null)
 * @return the HiveChar, re-wrapped to the declared length when the raw
 *         value does not match the type parameters; null for null input
 */
@Override
public HiveChar getPrimitiveJavaObject(Object o) {
  if (o == null) {
    return null;
  }
  CharTypeInfo charType = (CharTypeInfo) typeInfo;
  HiveChar value = ((LazyHiveChar) o).getWritableObject().getHiveChar();
  // Fast path: value already conforms to the declared length parameter.
  if (BaseCharUtils.doesPrimitiveMatchTypeParams(value, charType)) {
    return value;
  }
  // Re-wrap so the declared max length is enforced.
  return new HiveChar(value, charType.getLength());
}
// Fragment of a Teradata binary deserializer: per primitive category it reads
// the raw value (varchar, fixed-width char scaled by charset byte width,
// decimal sized from precision), honors null indicators, and reuses writable
// wrappers when a reuse object is supplied.
// NOTE(review): garbled by extraction — the VARCHAR branch has an "else"
// without its "if (isNull)", the CHAR/DECIMAL case labels and the declarations
// of "ctype"/"dtype" are missing, and the default label before the throw is
// gone. Restore the original from version control before editing.
case PRIMITIVE: PrimitiveTypeInfo ptype = (PrimitiveTypeInfo) type; switch (ptype.getPrimitiveCategory()) { case VARCHAR: // Teradata Type: VARCHAR String st = in.readVarchar(); } else { HiveVarcharWritable r = reuse == null ? new HiveVarcharWritable() : (HiveVarcharWritable) reuse; r.set(st, ((VarcharTypeInfo) type).getLength()); return r; int length = ctype.getLength(); String c = in.readChar(length * getCharByteNum(charCharset)); if (isNull) { return null; } else { HiveCharWritable r = reuse == null ? new HiveCharWritable() : (HiveCharWritable) reuse; r.set(c, length); return r; int precision = dtype.precision(); int scale = dtype.scale(); HiveDecimal hd = in.readDecimal(scale, getDecimalByteNum(precision)); if (isNull) { throw new SerDeException("Unrecognized type: " + ptype.getPrimitiveCategory());
// Fragment mapping a Hive primitive TypeInfo to a TypeDesc: CHAR/VARCHAR
// carry their length, DECIMAL carries precision and scale.
// NOTE(review): garbled by extraction — the BOOLEAN case has no "break", and
// "case DATE:" is immediately followed by DECIMAL-handling code, so the DATE
// body and the "case DECIMAL:" label were evidently lost. The surrounding
// method (including the leading error-message string) is also cut off at both
// ends. Restore the original from version control before editing.
"Unsupported non-primitive type " + typeString); switch (((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory()) { case BOOLEAN: typeDesc = new TypeDesc(TypeDesc.Type.BOOLEAN); case CHAR: CharTypeInfo charTypeInfo = (CharTypeInfo) typeInfo; typeDesc = new TypeDesc(TypeDesc.Type.CHAR, charTypeInfo.getLength()); break; case VARCHAR: VarcharTypeInfo varcharTypeInfo = (VarcharTypeInfo) typeInfo; typeDesc = new TypeDesc(TypeDesc.Type.VARCHAR, varcharTypeInfo.getLength()); break; case DATE: DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) typeInfo; typeDesc = new TypeDesc(TypeDesc.Type.DECIMAL, decimalTypeInfo.getPrecision(), decimalTypeInfo.getScale()); break; default:
LazyBinaryHiveChar(WritableHiveCharObjectInspector oi) { super(oi); maxLength = ((CharTypeInfo)oi.getTypeInfo()).getLength(); data = new HiveCharWritable(); }
// Fragment of a binary serializer: sanity-asserts that varchar/char values fit
// their declared lengths before writing (char padded to a fixed byte width per
// charset), then writes a decimal using its declared precision/scale.
// NOTE(review): garbled by extraction — these asserts and writes clearly belong
// to separate case branches whose labels and surrounding statements were lost
// (e.g. nothing here writes the varchar, and "dtype"/"poi"/"objectForField"
// are declared elsewhere). Restore the original from version control.
assert ((VarcharTypeInfo) ti).getLength() >= hv.getHiveVarchar().getCharacterLength(); assert ((CharTypeInfo) ti).getLength() >= hc.getHiveChar().getCharacterLength(); out.writeChar(hc, getCharByteNum(charCharset) * ((CharTypeInfo) ti).getLength()); return; int precision = dtype.precision(); int scale = dtype.scale(); HiveDecimalObjectInspector hdoi = (HiveDecimalObjectInspector) poi; HiveDecimalWritable hd = hdoi.getPrimitiveWritableObject(objectForField); assert (dtype.getPrecision() >= hd.precision());
/**
 * Maps a Hive primitive field inspector to a canonical Type.
 *
 * <p>Simple categories resolve through the HIVE_TO_CANONICAL lookup table;
 * parameterized categories (DECIMAL, CHAR, VARCHAR) are built from the
 * inspector's TypeInfo so precision/scale/length are preserved.
 *
 * @param fieldInspector a primitive object inspector
 * @return the canonical Type, or null for an unmapped category
 */
private static Type getPrimitiveType(final ObjectInspector fieldInspector) {
  final PrimitiveObjectInspector primitiveInspector =
      (PrimitiveObjectInspector) fieldInspector;
  final PrimitiveCategory category = primitiveInspector.getPrimitiveCategory();
  // Unparameterized categories come straight from the lookup table.
  if (HiveTypeMapping.getHIVE_TO_CANONICAL().containsKey(category.name())) {
    return HiveTypeMapping.getHIVE_TO_CANONICAL().get(category.name());
  }
  switch (category) {
    case DECIMAL: {
      final DecimalTypeInfo decimalInfo =
          (DecimalTypeInfo) primitiveInspector.getTypeInfo();
      return DecimalType.createDecimalType(decimalInfo.precision(), decimalInfo.getScale());
    }
    case CHAR: {
      final CharTypeInfo charInfo = (CharTypeInfo) primitiveInspector.getTypeInfo();
      return CharType.createCharType(charInfo.getLength());
    }
    case VARCHAR: {
      final VarcharTypeInfo varcharInfo =
          (VarcharTypeInfo) primitiveInspector.getTypeInfo();
      return VarcharType.createVarcharType(varcharInfo.getLength());
    }
    default:
      return null;
  }
}
// Fragment of vectorized batch deserialization for CHAR/VARCHAR columns:
// varchar bytes are truncated to the declared length, char bytes are
// right-trimmed and truncated, empty/null byte runs are normalized to the
// empty string, and the writable is set with the declared max length.
// NOTE(review): garbled by extraction — there are two "} else {" arms that
// both set EMPTY_STRING, the varchar branch's closing logic is missing, and
// "hiveVarcharWritable.set(EMPTY_STRING, -1)" passing -1 as a length looks
// like a casualty of the same garbling. Restore the original from version
// control before editing.
final PrimitiveCategory primitiveCategory = primitiveTypeInfo.getPrimitiveCategory(); final Writable primitiveWritable = VectorizedBatchUtil.getPrimitiveWritable(primitiveCategory); final int adjustedLength = StringExpr.truncate( bytes, start, length, ((VarcharTypeInfo) primitiveTypeInfo).getLength()); if (adjustedLength == 0) { hiveVarcharWritable.set(EMPTY_STRING, -1); final int maxLength = ((CharTypeInfo) primitiveTypeInfo).getLength(); if (bytes == null || length == 0) { if (length > 0) { nullBytesReadError(primitiveCategory, batchIndex); hiveCharWritable.set(EMPTY_STRING, maxLength); } else { final int adjustedLength = StringExpr.rightTrimAndTruncate(bytes, start, length, ((CharTypeInfo) primitiveTypeInfo).getLength()); hiveCharWritable.set(EMPTY_STRING, maxLength); } else { hiveCharWritable.set( new String(bytes, start, adjustedLength, Charsets.UTF_8), maxLength);
/**
 * Deep-copies a CHAR value held either as raw Text or as a HiveCharWritable.
 *
 * <p>Text input carries no length metadata, so it is wrapped in a fresh
 * HiveCharWritable sized to this inspector's declared CHAR length. A
 * HiveCharWritable that already matches the type parameters is copy-
 * constructed directly; otherwise it is re-wrapped to enforce them.
 *
 * @param o the value to copy (may be null)
 * @return an independent copy of the value, or null for null input
 */
public Object copyObject(Object o) {
  if (o == null) {
    return null;
  }
  if (o instanceof Text) {
    String str = ((Text) o).toString();
    HiveCharWritable hcw = new HiveCharWritable();
    hcw.set(str, ((CharTypeInfo) typeInfo).getLength());
    return hcw;
  }
  HiveCharWritable writable = (HiveCharWritable) o;
  // Fix: reuse the already-cast local instead of redundantly re-casting o.
  if (doesWritableMatchTypeParams(writable)) {
    return new HiveCharWritable(writable);
  }
  return getWritableWithParams(writable);
}
// Fragment of HCatalog test-schema setup: rebuilds the column list with an
// int "id" and a string column, then picks a parameterized TypeInfo
// (CHAR/VARCHAR with precision, DECIMAL with precision+scale) from the
// generator's declared HCat type.
// NOTE(review): garbled by extraction — "hCatTblCols" dangles with no call
// before the switch (the second add(...) was evidently lost), and the switch
// is never closed. Restore the original from version control before editing.
hCatTblCols.clear(); PrimitiveTypeInfo tInfo; tInfo = new PrimitiveTypeInfo(); tInfo.setTypeName(HCatFieldSchema.Type.INT.name().toLowerCase()); hCatTblCols.add(new HCatFieldSchema("id", tInfo, "")); tInfo = new PrimitiveTypeInfo(); tInfo.setTypeName(HCatFieldSchema.Type.STRING.name().toLowerCase()); hCatTblCols switch(gen.getHCatType()) { case CHAR: tInfo = new CharTypeInfo(gen.getHCatPrecision()); break; case VARCHAR: tInfo = new VarcharTypeInfo(gen.getHCatPrecision()); break; case DECIMAL: tInfo = new DecimalTypeInfo(gen.getHCatPrecision(), gen.getHCatScale()); break;
// Test-fixture fragment: builds a CHAR(10) column expression with two HiveChar
// constants, then a VARCHAR(10) column with a HiveVarchar constant.
// NOTE(review): this is the interior of a larger test method — col1Expr,
// constDesc, and constDesc2 are declared elsewhere, and the varchar constDesc2
// assignment appears cut off. Not a complete definition from this view.
CharTypeInfo charTypeInfo = new CharTypeInfo(10); col1Expr = new ExprNodeColumnDesc(charTypeInfo, "col1", "table", false); constDesc = new ExprNodeConstantDesc(charTypeInfo, new HiveChar("Alpha", 10)); constDesc2 = new ExprNodeConstantDesc(charTypeInfo, new HiveChar("Bravo", 10)); VarcharTypeInfo varcharTypeInfo = new VarcharTypeInfo(10); col1Expr = new ExprNodeColumnDesc(varcharTypeInfo, "col1", "table", false); constDesc = new ExprNodeConstantDesc(varcharTypeInfo, new HiveVarchar("Alpha", 10));
// Fragment of an else-if dispatch that copies char/varchar values into a
// BytesColumnVector: each branch pulls the declared max length from the
// inspector's TypeInfo and extracts the writable's text value.
// NOTE(review): this is the middle of a longer else-if chain — the leading
// condition, the code that actually writes into "bv", and the chain's tail are
// outside this view (bv and maxLength are computed but unused here). Not a
// complete definition; skip structural edits.
} else if (outputOI instanceof WritableHiveCharObjectInspector) { WritableHiveCharObjectInspector writableHiveCharObjectOI = (WritableHiveCharObjectInspector) outputOI; int maxLength = ((CharTypeInfo) writableHiveCharObjectOI.getTypeInfo()).getLength(); BytesColumnVector bv = (BytesColumnVector) colVec; hiveCharWritable = writableHiveCharObjectOI.getPrimitiveWritableObject(value); Text t = hiveCharWritable.getTextValue(); } else if (outputOI instanceof WritableHiveVarcharObjectInspector) { WritableHiveVarcharObjectInspector writableHiveVarcharObjectOI = (WritableHiveVarcharObjectInspector) outputOI; int maxLength = ((VarcharTypeInfo) writableHiveVarcharObjectOI.getTypeInfo()).getLength(); BytesColumnVector bv = (BytesColumnVector) colVec;
// Fragment of an else-if chain returning the declared length of a
// varchar/char object inspector's type.
// NOTE(review): the chain's opening condition and its tail are outside this
// view — incomplete definition; skip structural edits.
} else if (oi instanceof HiveVarcharObjectInspector) { VarcharTypeInfo type = (VarcharTypeInfo) ((HiveVarcharObjectInspector) oi).getTypeInfo(); return type.getLength(); } else if (oi instanceof HiveCharObjectInspector) { CharTypeInfo type = (CharTypeInfo) ((HiveCharObjectInspector) oi).getTypeInfo(); return type.getLength();
/**
 * Returns the maximum character length declared by this CHAR type.
 *
 * @return the CHAR type's length parameter
 */
public int getMaxLength() {
  // The max length is exactly the CHAR type parameter — no local needed.
  return ((CharTypeInfo) typeInfo).getLength();
}
}