/**
 * Rebuilds the PTF QueryDef on the task side using the PTFDeserializer,
 * walking the chain in the order defined by QueryDefWalker.
 *
 * @param hiveConf configuration used to (re)initialize SerDes and evaluators
 * @throws HiveException if any step of the chain fails to initialize
 */
protected void reconstructQueryDef(Configuration hiveConf) throws HiveException {
  PTFDeserializer deserializer =
      new PTFDeserializer(conf, (StructObjectInspector) inputObjInspectors[0], hiveConf);
  deserializer.initializePTFChain(conf.getFuncDef());
}
/**
 * Builds a TableDesc for PTF intermediate data described by the given row OI.
 * Column names/types are derived from the OI; rows are stored with
 * LazyBinarySerDe in PTF sequence files.
 *
 * @param oI row ObjectInspector describing the intermediate shape
 * @return a TableDesc wired to the PTF sequence-file formats
 */
public static TableDesc createTableDesc(StructObjectInspector oI) {
  Map<String, String> props = new HashMap<String, String>();
  // Populates LIST_COLUMNS and LIST_COLUMN_TYPES from the OI.
  PTFDeserializer.addOIPropertiestoSerDePropsMap(oI, props);
  String colNames = props.get(serdeConstants.LIST_COLUMNS);
  String colTypes = props.get(serdeConstants.LIST_COLUMN_TYPES);
  // FIX: colNames/colTypes are already Strings — the original called a
  // redundant toString() on them.
  TableDesc tblDesc = new TableDesc(
      PTFSequenceFileInputFormat.class,
      PTFHiveSequenceFileOutputFormat.class,
      Utilities.makeProperties(
          serdeConstants.SERIALIZATION_FORMAT, "" + Utilities.ctrlaCode,
          serdeConstants.LIST_COLUMNS, colNames,
          serdeConstants.LIST_COLUMN_TYPES, colTypes,
          serdeConstants.SERIALIZATION_LIB, LazyBinarySerDe.class.getName()));
  return tblDesc;
}
/**
 * Initializes every node of the PTF chain, starting from the query input and
 * working outward to the terminal table function, then adjusts the output OI
 * for streaming.
 *
 * @param tblFnDef the terminal (outermost) partitioned table function
 * @throws HiveException if any node fails to initialize
 */
public void initializePTFChain(PartitionedTableFunctionDef tblFnDef) throws HiveException {
  // Walk input links back to the query input, prepending so the deque ends
  // up ordered input-first.
  Deque<PTFInputDef> chain = new ArrayDeque<PTFInputDef>();
  for (PTFInputDef cur = tblFnDef; cur != null; cur = cur.getInput()) {
    chain.addFirst(cur);
  }
  for (PTFInputDef node : chain) {
    if (node instanceof PTFQueryInputDef) {
      initialize((PTFQueryInputDef) node, inputOI);
    } else if (node instanceof WindowTableFunctionDef) {
      initializeWindowing((WindowTableFunctionDef) node);
    } else {
      initialize((PartitionedTableFunctionDef) node);
    }
  }
  PTFDeserializer.alterOutputOIForStreaming(ptfDesc);
}
// NOTE(review): fragment — the enclosing method header (presumably a
// windowing-init method assigning the cast result below to a
// WindowingTableFunctionResolver local) is not visible here, and braces do
// not balance within this span; do not edit logic without the full method.
(WindowingTableFunctionResolver) constructResolver(def.getResolverClassName());
// Bind the resolver to this windowing invocation (hConf variant).
tResolver.initialize(hConf, ptfDesc, def, tEval);
if (wFnDef.getArgs() != null) {
for (PTFExpressionDef arg : wFnDef.getArgs()) {
// Set up evaluator/OI for each window-function argument.
initialize(arg, inpShape);
// Initialize the window frame against the input shape.
initialize(wFrmDef, inpShape);
setupWdwFnEvaluator(wFnDef);
// Input columns pass through into the windowing output row.
PTFDeserializer.addInputColumnsToList(inpShape, aliases, fieldOIs);
StructObjectInspector wdwOutOI = ObjectInspectorFactory.getStandardStructObjectInspector(
aliases, fieldOIs);
tResolver.setWdwProcessingOutputOI(wdwOutOI);
initialize(def.getOutputShape(), wdwOutOI);
tResolver.initializeOutputOI();
// NOTE(review): fragment — opens mid-method (likely the
// PartitionedTableFunctionDef initializer) and contains an unmatched '}';
// the surrounding if/for structure is not visible in this chunk.
initialize(arg, inpShape);
// Resolver is reconstructed from the class name recorded at translation time.
TableFunctionResolver tResolver = constructResolver(def.getResolverClassName());
tResolver.initialize(hConf, ptfDesc, def, tEval);
// Branch (condition not visible): function transforms raw input, so bind the
// raw-input shape to the evaluator's raw-input OI.
initialize(def.getRawInputShape(), tEval.getRawInputOI());
} else {
// No raw-input transformation: raw input shape is just the input shape.
def.setRawInputShape(inpShape);
initialize(def.getOutputShape(), tEval.getOutputOI());
/**
 * Initializes the query-input node: its output shape is simply bound to the
 * ObjectInspector of the incoming rows.
 *
 * @param def the query input definition
 * @param OI  ObjectInspector of the rows feeding the PTF chain
 * @throws HiveException if shape initialization fails
 */
protected void initialize(PTFQueryInputDef def, StructObjectInspector OI) throws HiveException {
  initialize(def.getOutputShape(), OI);
}
/**
 * Translates a PTF invocation spec into a PTFDesc, then adjusts the output
 * OI for streaming.
 *
 * @param qSpec    the invocation to translate
 * @param semAly   enclosing semantic analyzer
 * @param hCfg     session configuration
 * @param inputRR  row resolver of the PTF input
 * @param unparseT unparse translator for expression rewriting
 * @return the fully translated PTFDesc
 * @throws SemanticException on translation failure
 */
public PTFDesc translate(PTFInvocationSpec qSpec, SemanticAnalyzer semAly, HiveConf hCfg,
    RowResolver inputRR, UnparseTranslator unparseT) throws SemanticException {
  init(semAly, hCfg, inputRR, unparseT);
  ptfInvocation = qSpec;
  PTFDesc desc = new PTFDesc();
  desc.setCfg(hCfg);
  desc.setLlInfo(llInfo);
  ptfDesc = desc;
  translatePTFChain();
  PTFDeserializer.alterOutputOIForStreaming(ptfDesc);
  return ptfDesc;
}
/**
 * Builds and attaches an expression evaluator (and its output OI) for the
 * given expression definition, evaluated against the input shape.
 *
 * @param eDef     expression definition to populate
 * @param inpShape shape the expression is evaluated over
 * @throws HiveException if evaluator initialization fails
 */
protected void initialize(PTFExpressionDef eDef, ShapeDetails inpShape) throws HiveException {
  ExprNodeDesc node = eDef.getExprNode();
  ExprNodeEvaluator evaluator = WindowingExprNodeEvaluatorFactory.get(llInfo, node);
  // Initialize first so eDef is left untouched if this throws.
  ObjectInspector outOI = initExprNodeEvaluator(evaluator, node, inpShape);
  eDef.setExprEvaluator(evaluator);
  eDef.setOI(outOI);
}
/**
 * Serializes the struct OI's column names and types into the given SerDe
 * properties map under LIST_COLUMNS / LIST_COLUMN_TYPES (comma-separated).
 * No-op when the map is null.
 *
 * @param OI            struct OI describing the row
 * @param serdePropsMap destination properties map (may be null)
 */
@SuppressWarnings({"unchecked"})
public static void addOIPropertiestoSerDePropsMap(StructObjectInspector OI,
    Map<String, String> serdePropsMap) {
  if (serdePropsMap == null) {
    return;
  }
  ArrayList<? extends Object>[] tInfo = getTypeMap(OI);
  ArrayList<String> names = (ArrayList<String>) tInfo[0];
  ArrayList<TypeInfo> types = (ArrayList<TypeInfo>) tInfo[1];
  StringBuilder nameCsv = new StringBuilder();
  StringBuilder typeCsv = new StringBuilder();
  String sep = "";
  for (int i = 0; i < types.size(); i++) {
    nameCsv.append(sep).append(names.get(i));
    typeCsv.append(sep).append(types.get(i).getTypeName());
    sep = ",";
  }
  serdePropsMap.put(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS,
      nameCsv.toString());
  serdePropsMap.put(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES,
      typeCsv.toString());
}
// NOTE(review): fragment — translator-side twin of the windowing-init
// fragment (resolver initialized WITHOUT an hConf argument here). The method
// header is not visible and braces do not balance within this span.
(WindowingTableFunctionResolver) constructResolver(def.getResolverClassName());
tResolver.initialize(ptfDesc, def, tEval);
if (wFnDef.getArgs() != null) {
for (PTFExpressionDef arg : wFnDef.getArgs()) {
// Evaluator/OI per window-function argument.
initialize(arg, inpShape);
initialize(wFrmDef, inpShape);
setupWdwFnEvaluator(wFnDef);
PTFDeserializer.addInputColumnsToList(inpShape, aliases, fieldOIs);
StructObjectInspector wdwOutOI = ObjectInspectorFactory.getStandardStructObjectInspector(
aliases, fieldOIs);
tResolver.setWdwProcessingOutputOI(wdwOutOI);
initialize(def.getOutputShape(), wdwOutOI);
tResolver.initializeOutputOI();
// NOTE(review): fragment — translator-side twin of the PTF-init fragment
// (no hConf in tResolver.initialize). Opens mid-method, unmatched '}'.
initialize(arg, inpShape);
TableFunctionResolver tResolver = constructResolver(def.getResolverClassName());
tResolver.initialize(ptfDesc, def, tEval);
// Branch (condition not visible): raw-input transformation present.
initialize(def.getRawInputShape(), tEval.getRawInputOI());
} else {
// No raw-input transformation: reuse the input shape.
def.setRawInputShape(inpShape);
initialize(def.getOutputShape(), tEval.getOutputOI());
/**
 * Initializes the query-input node by binding its output shape to the
 * ObjectInspector of the rows entering the chain.
 *
 * @param def query input definition
 * @param OI  ObjectInspector of the incoming rows
 * @throws HiveException if shape initialization fails
 */
protected void initialize(PTFQueryInputDef def, StructObjectInspector OI) throws HiveException {
  ShapeDetails shape = def.getOutputShape();
  initialize(shape, OI);
}
/**
 * Entry point for translating a PTF invocation into its runtime descriptor.
 * Sets up translator state, builds the PTFDesc, translates the chain, and
 * finally tweaks the output OI for streaming.
 *
 * @param qSpec    invocation spec
 * @param semAly   semantic analyzer
 * @param hCfg     configuration
 * @param inputRR  input row resolver
 * @param unparseT unparse translator
 * @return translated PTFDesc
 * @throws SemanticException on failure
 */
public PTFDesc translate(PTFInvocationSpec qSpec, SemanticAnalyzer semAly, HiveConf hCfg,
    RowResolver inputRR, UnparseTranslator unparseT) throws SemanticException {
  init(semAly, hCfg, inputRR, unparseT);
  this.ptfInvocation = qSpec;
  this.ptfDesc = new PTFDesc();
  this.ptfDesc.setLlInfo(llInfo);
  this.ptfDesc.setCfg(hCfg);
  translatePTFChain();
  PTFDeserializer.alterOutputOIForStreaming(ptfDesc);
  return ptfDesc;
}
/**
 * Wires an expression evaluator and its resulting ObjectInspector into the
 * expression definition, evaluated over the supplied input shape.
 *
 * @param eDef     expression definition to fill in
 * @param inpShape input shape for evaluation
 * @throws HiveException on evaluator initialization failure
 */
protected void initialize(PTFExpressionDef eDef, ShapeDetails inpShape) throws HiveException {
  final ExprNodeDesc expr = eDef.getExprNode();
  final ExprNodeEvaluator eval = WindowingExprNodeEvaluatorFactory.get(llInfo, expr);
  final ObjectInspector resultOI = initExprNodeEvaluator(eval, expr, inpShape);
  eDef.setExprEvaluator(eval);
  eDef.setOI(resultOI);
}
/**
 * Writes the OI's column names and column types into the SerDe properties
 * map as comma-separated lists (LIST_COLUMNS / LIST_COLUMN_TYPES). Does
 * nothing if the map is null.
 *
 * @param OI            struct ObjectInspector for the row
 * @param serdePropsMap target map, may be null
 */
@SuppressWarnings({"unchecked"})
public static void addOIPropertiestoSerDePropsMap(StructObjectInspector OI,
    Map<String, String> serdePropsMap) {
  if (serdePropsMap == null) {
    return;
  }
  ArrayList<? extends Object>[] typeMap = getTypeMap(OI);
  ArrayList<String> colNames = (ArrayList<String>) typeMap[0];
  ArrayList<TypeInfo> colTypes = (ArrayList<TypeInfo>) typeMap[1];
  StringBuilder namesBuf = new StringBuilder();
  StringBuilder typesBuf = new StringBuilder();
  for (int i = 0; i < colTypes.size(); i++) {
    if (i > 0) {
      namesBuf.append(',');
      typesBuf.append(',');
    }
    namesBuf.append(colNames.get(i));
    typesBuf.append(colTypes.get(i).getTypeName());
  }
  serdePropsMap.put(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS,
      namesBuf.toString());
  serdePropsMap.put(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES,
      typesBuf.toString());
}
// NOTE(review): fragment — variant of the windowing-init fragment that
// initializes the frame's start and end boundary expressions separately.
// Method header not visible; braces do not balance within this span.
(WindowingTableFunctionResolver) constructResolver(def.getResolverClassName());
tResolver.initialize(ptfDesc, def, tEval);
if (wFnDef.getArgs() != null) {
for (PTFExpressionDef arg : wFnDef.getArgs()) {
initialize(arg, inpShape);
// Frame boundaries are expression defs too; evaluate against the input shape.
initialize(wFrmDef.getStart(), inpShape);
initialize(wFrmDef.getEnd(), inpShape);
setupWdwFnEvaluator(wFnDef);
PTFDeserializer.addInputColumnsToList(inpShape, aliases, fieldOIs);
StructObjectInspector wdwOutOI = ObjectInspectorFactory.getStandardStructObjectInspector(
aliases, fieldOIs);
tResolver.setWdwProcessingOutputOI(wdwOutOI);
initialize(def.getOutputShape(), wdwOutOI);
tResolver.initializeOutputOI();
/**
 * Walks the PTF chain from the terminal function back to the query input,
 * then initializes each node in input-to-output order, finishing with the
 * streaming OI adjustment.
 *
 * @param tblFnDef the outermost partitioned table function of the chain
 * @throws HiveException if any node fails to initialize
 */
public void initializePTFChain(PartitionedTableFunctionDef tblFnDef) throws HiveException {
  Deque<PTFInputDef> pending = new ArrayDeque<PTFInputDef>();
  PTFInputDef walker = tblFnDef;
  while (walker != null) {
    pending.push(walker);
    walker = walker.getInput();
  }
  // Drain the stack: the deepest input (the query input) comes off first.
  PTFInputDef node;
  while ((node = pending.poll()) != null) {
    if (node instanceof PTFQueryInputDef) {
      initialize((PTFQueryInputDef) node, inputOI);
    } else if (node instanceof WindowTableFunctionDef) {
      initializeWindowing((WindowTableFunctionDef) node);
    } else {
      initialize((PartitionedTableFunctionDef) node);
    }
  }
  PTFDeserializer.alterOutputOIForStreaming(ptfDesc);
}
/**
 * Reconstructs the QueryDef via PTFDeserializer, visiting nodes in the order
 * defined by QueryDefWalker.
 *
 * @param hiveConf configuration for SerDe/evaluator initialization
 * @throws HiveException if chain initialization fails
 */
protected void reconstructQueryDef(Configuration hiveConf) throws HiveException {
  StructObjectInspector rowOI = (StructObjectInspector) inputObjInspectors[0];
  new PTFDeserializer(conf, rowOI, hiveConf).initializePTFChain(conf.getFuncDef());
}
// NOTE(review): fragment — opens mid-method and contains an unmatched '}';
// translator-side PTF initialization (resolver init without hConf).
initialize(arg, inpShape);
TableFunctionResolver tResolver = constructResolver(def.getResolverClassName());
tResolver.initialize(ptfDesc, def, tEval);
// Branch (condition not visible): bind raw-input shape to raw-input OI.
initialize(def.getRawInputShape(), tEval.getRawInputOI());
} else {
// No raw transformation: the raw input shape is the input shape itself.
def.setRawInputShape(inpShape);
initialize(def.getOutputShape(), tEval.getOutputOI());
/**
 * Creates and initializes a LazyBinarySerDe for the row shape described by
 * the given OI. Column name/type properties are derived from the OI and also
 * written into {@code serdePropsMap} when one is supplied.
 *
 * @param cfg           configuration for SerDe initialization
 * @param oi            struct OI describing the row
 * @param serdePropsMap optional map to receive the derived SerDe properties;
 *                      a private map is used when null
 * @return the initialized SerDe
 * @throws SerDeException if SerDe initialization fails
 */
protected static AbstractSerDe createLazyBinarySerDe(Configuration cfg,
    StructObjectInspector oi, Map<String, String> serdePropsMap) throws SerDeException {
  // FIX: use a local instead of reassigning the parameter.
  Map<String, String> props =
      serdePropsMap == null ? new LinkedHashMap<String, String>() : serdePropsMap;
  PTFDeserializer.addOIPropertiestoSerDePropsMap(oi, props);
  AbstractSerDe serDe = new LazyBinarySerDe();
  Properties p = new Properties();
  p.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS,
      props.get(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS));
  p.setProperty(
      org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES,
      props.get(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES));
  SerDeUtils.initializeSerDe(serDe, cfg, p, null);
  return serDe;
}