/**
 * Resolves the {@link StructField} reference for the given column name using
 * this table's deserializer.
 *
 * @param fld the column name to look up
 * @return the struct field reference for {@code fld}
 * @throws RuntimeException wrapping any failure raised while obtaining the
 *         object inspector or resolving the field
 */
public StructField getField(String fld) {
  try {
    StructObjectInspector rowInspector =
        (StructObjectInspector) getDeserializer().getObjectInspector();
    return rowInspector.getStructFieldRef(fld);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
/**
 * Resolves the {@link StructField} reference for the given column name using
 * this table's deserializer.
 *
 * @param fld the column name to look up
 * @return the struct field reference for {@code fld}
 * @throws RuntimeException wrapping any failure raised while obtaining the
 *         object inspector or resolving the field
 */
public StructField getField(String fld) {
  try {
    StructObjectInspector rowInspector =
        (StructObjectInspector) getDeserializer().getObjectInspector();
    return rowInspector.getStructFieldRef(fld);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
public ArrayList<StructField> getFields() { ArrayList<StructField> fields = new ArrayList<StructField>(); try { Deserializer decoder = getDeserializer(); // Expand out all the columns of the table StructObjectInspector structObjectInspector = (StructObjectInspector) decoder .getObjectInspector(); List<? extends StructField> fld_lst = structObjectInspector .getAllStructFieldRefs(); for (StructField field : fld_lst) { fields.add(field); } } catch (SerDeException e) { throw new RuntimeException(e); } return fields; }
public ArrayList<StructField> getFields() { ArrayList<StructField> fields = new ArrayList<StructField>(); try { Deserializer decoder = getDeserializer(); // Expand out all the columns of the table StructObjectInspector structObjectInspector = (StructObjectInspector) decoder .getObjectInspector(); List<? extends StructField> fld_lst = structObjectInspector .getAllStructFieldRefs(); for (StructField field : fld_lst) { fields.add(field); } } catch (SerDeException e) { throw new RuntimeException(e); } return fields; }
/**
 * Builds a {@link TableDesc} for the given table, stamping the serialization
 * library (the deserializer's class name) into the table's metadata properties.
 *
 * @param tbl the table to describe
 * @return a table descriptor carrying the table's input/output formats and
 *         its metadata properties
 */
public static TableDesc getTableDesc(Table tbl) {
  Properties schema = tbl.getMetadata();
  // Record which serde class materializes rows for this table.
  schema.put(serdeConstants.SERIALIZATION_LIB,
      tbl.getDeserializer().getClass().getName());
  return new TableDesc(tbl.getInputFormatClass(), tbl.getOutputFormatClass(), schema);
}
/**
 * Builds a {@link TableDesc} for the given table, stamping the serialization
 * library (the deserializer's class name) into the table's metadata properties.
 *
 * @param tbl the table to describe
 * @return a table descriptor carrying the table's input/output formats and
 *         its metadata properties
 */
public static TableDesc getTableDesc(Table tbl) {
  Properties schema = tbl.getMetadata();
  // Record which serde class materializes rows for this table.
  schema.put(serdeConstants.SERIALIZATION_LIB,
      tbl.getDeserializer().getClass().getName());
  return new TableDesc(tbl.getInputFormatClass(), tbl.getOutputFormatClass(), schema);
}
/**
 * Returns this table's column schema, sourcing it either from the metastore
 * record or from the table's deserializer depending on the serde in use.
 *
 * @param forMs true when the columns are being resolved for metastore storage
 *              (selects the metastore-storage-specific deserializer path when
 *              fields are not stored in the metastore for this serde)
 * @return the columns; an empty list if the serde failed to provide them
 */
private List<FieldSchema> getColsInternal(boolean forMs) {
  String serializationLib = getSerializationLib();
  try {
    // Do the lightweight check for general case.
    if (hasMetastoreBasedSchema(SessionState.getSessionConf(), serializationLib)) {
      // Metastore already holds the schema for this serde: read it directly.
      return tTable.getSd().getCols();
    } else if (forMs && !shouldStoreFieldsInMetastore(
        SessionState.getSessionConf(), serializationLib, tTable.getParameters())) {
      // Metastore-storage path for serdes whose fields are not persisted there.
      return Hive.getFieldsFromDeserializerForMsStorage(this, getDeserializer());
    } else {
      // General path: derive the column list from the deserializer itself.
      return HiveMetaStoreUtils.getFieldsFromDeserializer(getTableName(), getDeserializer());
    }
  } catch (Exception e) {
    // Best-effort: log the failure and fall through to an empty schema
    // rather than propagating to the caller.
    LOG.error("Unable to get field from serde: " + serializationLib, e);
  }
  return new ArrayList<FieldSchema>();
}
/**
 * Returns this table's column schema, sourcing it either from the metastore
 * record or from the table's deserializer depending on the serde in use.
 *
 * @param forMs true when the columns are being resolved for metastore storage
 *              (selects the metastore-storage-specific deserializer path when
 *              fields are not stored in the metastore for this serde)
 * @return the columns; an empty list if the serde failed to provide them
 */
private List<FieldSchema> getColsInternal(boolean forMs) {
  String serializationLib = getSerializationLib();
  try {
    // Do the lightweight check for general case.
    if (hasMetastoreBasedSchema(SessionState.getSessionConf(), serializationLib)) {
      // Metastore already holds the schema for this serde: read it directly.
      return tTable.getSd().getCols();
    } else if (forMs && !shouldStoreFieldsInMetastore(
        SessionState.getSessionConf(), serializationLib, tTable.getParameters())) {
      // Metastore-storage path for serdes whose fields are not persisted there.
      return Hive.getFieldsFromDeserializerForMsStorage(this, getDeserializer());
    } else {
      // General path: derive the column list from the deserializer itself.
      return MetaStoreUtils.getFieldsFromDeserializer(getTableName(), getDeserializer());
    }
  } catch (Exception e) {
    // Best-effort: log the failure and fall through to an empty schema
    // rather than propagating to the caller.
    LOG.error("Unable to get field from serde: " + serializationLib, e);
  }
  return new ArrayList<FieldSchema>();
}
/**
 * Evaluates the given expression against a partition's row schema.
 *
 * @param expr the expression to evaluate
 * @param p the partition supplying the row object inspector and values
 * @param vcs virtual columns visible to the expression
 * @return the evaluation result from {@link PartExprEvalUtils}
 * @throws SemanticException wrapping a {@link SerDeException} from the
 *         deserializer or a {@link HiveException} from evaluation
 */
static Object evalExprWithPart(ExprNodeDesc expr, Partition p,
    List<VirtualColumn> vcs) throws SemanticException {
  final Table tbl = p.getTable();
  final StructObjectInspector partRowInspector;
  try {
    partRowInspector =
        (StructObjectInspector) tbl.getDeserializer().getObjectInspector();
  } catch (SerDeException e) {
    throw new SemanticException(e);
  }
  try {
    return PartExprEvalUtils.evalExprWithPart(expr, p, vcs, partRowInspector);
  } catch (HiveException e) {
    throw new SemanticException(e);
  }
}
/**
 * Evaluates the given expression against a partition's row schema.
 *
 * @param expr the expression to evaluate
 * @param p the partition supplying the row object inspector and values
 * @param vcs virtual columns visible to the expression
 * @return the evaluation result from {@link PartExprEvalUtils}
 * @throws SemanticException wrapping a {@link SerDeException} from the
 *         deserializer or a {@link HiveException} from evaluation
 */
static Object evalExprWithPart(ExprNodeDesc expr, Partition p,
    List<VirtualColumn> vcs) throws SemanticException {
  final Table tbl = p.getTable();
  final StructObjectInspector partRowInspector;
  try {
    partRowInspector =
        (StructObjectInspector) tbl.getDeserializer().getObjectInspector();
  } catch (SerDeException e) {
    throw new SemanticException(e);
  }
  try {
    return PartExprEvalUtils.evalExprWithPart(expr, p, vcs, partRowInspector);
  } catch (HiveException e) {
    throw new SemanticException(e);
  }
}
/**
 * Refreshes the metastore column list for a table (or one of its partitions)
 * from the table's deserializer.
 *
 * @param tbl the table whose columns are refreshed
 * @param part the partition whose storage descriptor receives the columns,
 *             or null to update the table-level descriptor
 * @return 0 on success
 * @throws HiveException if the serde's schema is already managed by HMS, or
 *         if the deserializer fails to produce the field list
 */
private int updateColumns(Table tbl, Partition part) throws HiveException {
  String serializationLib = tbl.getSd().getSerdeInfo().getSerializationLib();
  // Serdes listed in SERDES_USING_METASTORE_FOR_SCHEMA have their schema
  // owned by HMS; rewriting the columns here would be wrong.
  if (MetastoreConf.getStringCollection(conf,
      MetastoreConf.ConfVars.SERDES_USING_METASTORE_FOR_SCHEMA).contains(serializationLib)) {
    throw new HiveException(tbl.getTableName() + " has serde " + serializationLib
        + " for which schema " + "is already handled by HMS.");
  }
  Deserializer deserializer = tbl.getDeserializer(true);
  try {
    LOG.info("Updating metastore columns for table: {}", tbl.getTableName());
    final List<FieldSchema> fields = HiveMetaStoreUtils.getFieldsFromDeserializer(
        tbl.getTableName(), deserializer);
    StorageDescriptor sd = retrieveStorageDescriptor(tbl, part);
    sd.setCols(fields);
  } catch (org.apache.hadoop.hive.serde2.SerDeException | MetaException e) {
    // Pass the throwable as the trailing argument (not as a "{}" parameter)
    // so SLF4J logs the full stack trace instead of just e.toString().
    LOG.error("alter table update columns: {}", e.getMessage(), e);
    throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
  }
  return 0;
}
rowObjectInspector = (StructObjectInspector) viewTable.getDeserializer() .getObjectInspector(); } catch (SerDeException e) {
private static class ThreadLocalHive extends ThreadLocal<Hive> { @Override protected Hive initialValue() { return null; } @Override public synchronized void set(Hive hiveObj) { Hive currentHive = this.get(); if (currentHive != hiveObj) { // Remove/close current thread-local Hive object before overwriting with new Hive object. remove(); super.set(hiveObj); } } @Override public synchronized void remove() { Hive currentHive = this.get(); if (currentHive != null) { // Close the metastore connections before removing it from thread local hiveDB. currentHive.close(false); super.remove(); } } }
Deserializer deserializer = tbl.getDeserializer(); HiveStoragePredicateHandler.DecomposedPredicate decomposed = predicateHandler.decomposePredicate(
rowObjectInspector = (StructObjectInspector) viewTable.getDeserializer() .getObjectInspector(); } catch (SerDeException e) {
Utilities.getTableDesc(tbl), jobConf); Deserializer deserializer = tbl.getDeserializer(); HiveStoragePredicateHandler.DecomposedPredicate decomposed = predicateHandler.decomposePredicate(
try { StructObjectInspector rowObjectInspector = (StructObjectInspector) indexTableHandle .getDeserializer().getObjectInspector(); StructField field = rowObjectInspector.getStructFieldRef(rewriteQueryCtx.getIndexKey()); sigRS.add(new ColumnInfo(field.getFieldName(), TypeInfoUtils.getTypeInfoFromObjectInspector(
List<ColumnStatisticsObj> colStats = null; Deserializer deserializer = tbl.getDeserializer(true); if (deserializer instanceof AbstractSerDe) { String errorMsgs = ((AbstractSerDe) deserializer).getConfigurationErrors();
tbl.getDeserializer()));
assertEquals("SerializationLib is not set correctly", tbl .getSerializationLib(), ft.getSerializationLib()); assertEquals("Serde is not set correctly", tbl.getDeserializer() .getClass().getName(), ft.getDeserializer().getClass().getName()); } catch (HiveException e) { System.err.println(StringUtils.stringifyException(e));