/**
 * Looks up the serialization library configured on this object's SerDe info.
 *
 * @return the fully qualified SerDe serialization library class name
 */
public String getSerdeLib() {
    return getSerDeInfo().getSerializationLib();
}
/**
 * Returns the SerDe serialization library recorded in the storage descriptor.
 *
 * @return the fully qualified SerDe class name
 */
public String getSerDe() {
    return sd.getSerdeInfo().getSerializationLib();
}
/** @return the serialization library class name held by this object's SerDe info. */
public String getSerializationLib() {
    return this.getSerdeInfo().getSerializationLib();
}
/**
 * Convenience accessor that delegates to the underlying SerDe info.
 *
 * @return the SerDe serialization library class name
 */
public String getSerializationLib() {
    return this.getSerdeInfo().getSerializationLib();
}
/**
 * Checks whether the given table is stored with the Avro SerDe.
 *
 * @param table the metastore table to inspect
 * @return true if the table's SerDe serialization library is the Avro SerDe class
 */
private boolean isAvro(Table table) {
    String serdeLib = table.getSd().getSerdeInfo().getSerializationLib();
    // Constant kept on the left: equals(null) is simply false, so an unset
    // serde lib does not throw.
    return AvroSerDe.class.getName().equals(serdeLib);
}
}
/**
 * Determines whether the table is an Avro table whose schema is supplied
 * externally via the schema-URL table property.
 *
 * @param table the metastore table to inspect
 * @return true if the table uses the Avro SerDe and has a non-empty schema URL parameter
 */
public static boolean isAvroTableWithExternalSchema(Table table) {
    // Compare constant-first: the original value.equals(CONST) order threw
    // NullPointerException for tables whose SerDe lib is unset.
    if (!AVRO_SERDE_CLASSNAME.equals(table.getSd().getSerdeInfo().getSerializationLib())) {
        return false;
    }
    String schemaUrl = table.getParameters().get(AVRO_SCHEMA_URL_PROPERTY);
    return schemaUrl != null && !schemaUrl.isEmpty();
}
static StorerInfo extractStorerInfo(StorageDescriptor sd, Map<String, String> properties) throws IOException { Properties hcatProperties = new Properties(); for (String key : properties.keySet()) { hcatProperties.put(key, properties.get(key)); } // also populate with StorageDescriptor->SerDe.Parameters for (Map.Entry<String, String> param : sd.getSerdeInfo().getParameters().entrySet()) { hcatProperties.put(param.getKey(), param.getValue()); } return new StorerInfo( sd.getInputFormat(), sd.getOutputFormat(), sd.getSerdeInfo().getSerializationLib(), properties.get(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE), hcatProperties); }
/**
 * Wraps a metastore {@link SerDeInfo}, eagerly caching its name, serialization
 * library, and parameter map for direct access.
 *
 * @param serDeInfo the SerDe info to wrap
 */
public SerDeInfoWrapper(SerDeInfo serDeInfo) {
    this.serDeInfo = serDeInfo;
    this.parameters = serDeInfo.getParameters();
    this.serializationLib = serDeInfo.getSerializationLib();
    this.name = serDeInfo.getName();
}
/**
 * Resolves the deserializer class named by the table's SerDe serialization library.
 *
 * @param conf configuration used to load the class by name
 * @param table the metastore table whose SerDe info names the class
 * @return the deserializer class, or null when no serialization library is set
 * @throws Exception if the named class cannot be loaded or is not a Deserializer
 */
public static Class<? extends Deserializer> getDeserializerClass(
        Configuration conf, org.apache.hadoop.hive.metastore.api.Table table) throws Exception {
    String lib = table.getSd().getSerdeInfo().getSerializationLib();
    if (lib == null) {
        return null;
    }
    return conf.getClassByName(lib).asSubclass(Deserializer.class);
}
/**
 * Checks whether the given metastore table is an Avro table carrying an
 * explicit schema-URL entry in its table parameters.
 *
 * @param table the table to inspect
 * @return true if the Avro SerDe is configured and the schema-URL parameter is present
 * @throws PrestoException if the storage descriptor or its SerDe info is missing
 */
public static boolean isAvroTableWithSchemaSet(org.apache.hadoop.hive.metastore.api.Table table) {
    if (table.getParameters() == null) {
        return false;
    }
    StorageDescriptor storageDescriptor = table.getSd();
    if (storageDescriptor == null) {
        throw new PrestoException(HIVE_INVALID_METADATA, "Table does not contain a storage descriptor: " + table);
    }
    SerDeInfo serdeInfo = storageDescriptor.getSerdeInfo();
    if (serdeInfo == null) {
        throw new PrestoException(HIVE_INVALID_METADATA, "Table storage descriptor is missing SerDe info");
    }
    // Hoist the lib once instead of calling the getter twice.
    String serializationLib = serdeInfo.getSerializationLib();
    return serializationLib != null
            && table.getParameters().get(AVRO_SCHEMA_URL_KEY) != null
            && serializationLib.equals(AVRO.getSerDe());
}
/**
 * Converts the given SerDe info into a {@link State} of properties: every
 * SerDe parameter is copied, and the serialization library (when set) is
 * stored under {@code HiveConstants.SERDE_TYPE}.
 *
 * @param si the SerDe info to convert
 * @return a State populated from the SerDe info
 */
private static State getSerDeProps(SerDeInfo si) {
    State result = new State();
    for (Map.Entry<String, String> param : si.getParameters().entrySet()) {
        result.setProp(param.getKey(), param.getValue());
    }
    if (si.isSetSerializationLib()) {
        result.setProp(HiveConstants.SERDE_TYPE, si.getSerializationLib());
    }
    return result;
}
/**
 * Key wrapping a storage descriptor plus an explicit base location. The hash
 * deliberately uses the caller-supplied baseLocation rather than
 * sd.getLocation(), and tolerates a null descriptor or SerDe info.
 */
StorageDescriptorKey(String baseLocation, StorageDescriptor sd) {
    this.sd = sd;
    this.baseLocation = baseLocation;
    if (sd == null) {
        hashCode = Objects.hashCode(baseLocation);
    } else {
        String serdeLib =
            sd.getSerdeInfo() == null ? null : sd.getSerdeInfo().getSerializationLib();
        // Use the baseLocation provided instead of sd.getLocation().
        hashCode = Objects.hash(
            serdeLib, sd.getInputFormat(), sd.getOutputFormat(), baseLocation, sd.getCols());
    }
}
/**
 * Maps this partition's SerDe serialization library onto a known built-in
 * Hive SerDe, matching case-insensitively against each enum's string form.
 *
 * @return the matching built-in SerDe's name, or absent when none matches
 */
public Optional<String> getFileFormat() {
    String serdeLib = this.hivePartition.getTPartition().getSd().getSerdeInfo().getSerializationLib();
    for (HiveSerDeWrapper.BuiltInHiveSerDe candidate : HiveSerDeWrapper.BuiltInHiveSerDe.values()) {
        if (candidate.toString().equalsIgnoreCase(serdeLib)) {
            return Optional.fromNullable(candidate.name());
        }
    }
    return Optional.<String>absent();
}
/**
 * Tells whether a Hive table is actually an Avro table, by checking whether
 * its input format, output format, or SerDe library class name is Avro's.
 *
 * @param targetTable the Hive table to inspect
 * @return true if any of the storage class names indicate Avro
 * @throws IOException declared for callers; not thrown directly here
 */
public static boolean isHiveTableAvroType(Table targetTable) throws IOException {
    String serializationLib = targetTable.getTTable().getSd().getSerdeInfo().getSerializationLib();
    String inputFormat = targetTable.getTTable().getSd().getInputFormat();
    String outputFormat = targetTable.getTTable().getSd().getOutputFormat();
    // Guard each name: any of these may be unset on a table, and the previous
    // code threw NullPointerException calling endsWith() on a null receiver.
    return (inputFormat != null && inputFormat.endsWith("AvroContainerInputFormat"))
        || (outputFormat != null && outputFormat.endsWith("AvroContainerOutputFormat"))
        || (serializationLib != null && serializationLib.endsWith("AvroSerDe"));
}
}
/**
 * Returns the current value of the requested field, following the
 * Thrift-generated accessor pattern of one switch case per field.
 *
 * @param field the field selector
 * @return the field's current value (may be null)
 * @throws IllegalStateException if the field enum value is not handled
 */
public Object getFieldValue(_Fields field) {
  switch (field) {
  case NAME:
    return getName();
  case SERIALIZATION_LIB:
    return getSerializationLib();
  case PARAMETERS:
    return getParameters();
  case DESCRIPTION:
    return getDescription();
  case SERIALIZER_CLASS:
    return getSerializerClass();
  case DESERIALIZER_CLASS:
    return getDeserializerClass();
  case SERDE_TYPE:
    return getSerdeType();
  }
  // Unreachable if the switch covers every _Fields constant; kept as a
  // defensive guard for future enum additions.
  throw new IllegalStateException();
}
/**
 * Populates a {@link Storage.Builder} from a metastore storage descriptor.
 *
 * @param storageDescriptor the metastore descriptor to read
 * @param builder the target builder to populate
 * @param tablePartitionName used when deriving the bucket property
 * @throws PrestoException if the descriptor has no SerDe info
 */
public static void fromMetastoreApiStorageDescriptor(StorageDescriptor storageDescriptor, Storage.Builder builder, String tablePartitionName) {
    SerDeInfo serdeInfo = storageDescriptor.getSerdeInfo();
    if (serdeInfo == null) {
        throw new PrestoException(HIVE_INVALID_METADATA, "Table storage descriptor is missing SerDe info");
    }
    StorageFormat format = StorageFormat.createNullable(
            serdeInfo.getSerializationLib(),
            storageDescriptor.getInputFormat(),
            storageDescriptor.getOutputFormat());
    builder.setStorageFormat(format);
    builder.setLocation(nullToEmpty(storageDescriptor.getLocation()));
    builder.setBucketProperty(HiveBucketProperty.fromStorageDescriptor(storageDescriptor, tablePartitionName));
    // Skewed only when skew info exists AND it actually names skewed columns.
    builder.setSkewed(storageDescriptor.isSetSkewedInfo()
            && storageDescriptor.getSkewedInfo().isSetSkewedColNames()
            && !storageDescriptor.getSkewedInfo().getSkewedColNames().isEmpty());
    builder.setSerdeParameters(serdeInfo.getParameters() == null ? ImmutableMap.of() : serdeInfo.getParameters());
}
/**
 * Returns this partition's column schema, choosing between the metastore
 * copy and the deserializer-derived schema based on the SerDe.
 *
 * @param forMs true when the columns are being fetched for metastore storage,
 *              which enables the shouldStoreFieldsInMetastore fallback path
 * @return the column list; an empty list if schema resolution throws
 */
private List<FieldSchema> getColsInternal(boolean forMs) {
    try {
        String serializationLib = tPartition.getSd().getSerdeInfo().getSerializationLib();
        // Do the lightweight check for general case.
        if (Table.hasMetastoreBasedSchema(SessionState.getSessionConf(), serializationLib)) {
            return tPartition.getSd().getCols();
        } else if (forMs && !Table.shouldStoreFieldsInMetastore(
                SessionState.getSessionConf(), serializationLib, table.getParameters())) {
            // NOTE(review): this path appears to derive a metastore-storable
            // schema from the deserializer — confirm against Hive.* semantics.
            return Hive.getFieldsFromDeserializerForMsStorage(table, getDeserializer());
        }
        return HiveMetaStoreUtils.getFieldsFromDeserializer(table.getTableName(), getDeserializer());
    } catch (Exception e) {
        // Deliberately best-effort: log and fall through to an empty schema
        // rather than propagating the failure.
        LOG.error("Unable to get cols from serde: " + tPartition.getSd().getSerdeInfo().getSerializationLib(), e);
    }
    return new ArrayList<FieldSchema>();
}
/**
 * Converts a Thrift {@link SerDeInfo} into its model-layer counterpart.
 *
 * @param ms the Thrift SerDe info; must not be null
 * @return the populated MSerDeInfo
 * @throws MetaException if {@code ms} is null
 */
private MSerDeInfo convertToMSerDeInfo(SerDeInfo ms) throws MetaException {
    if (ms == null) {
        throw new MetaException("Invalid SerDeInfo object");
    }
    // A missing serde type is stored as 0 in the model layer.
    int serdeType = ms.getSerdeType() == null ? 0 : ms.getSerdeType().getValue();
    return new MSerDeInfo(
        ms.getName(),
        ms.getSerializationLib(),
        ms.getParameters(),
        ms.getDescription(),
        ms.getSerializerClass(),
        ms.getDeserializerClass(),
        serdeType);
}
/**
 * Round-trip test: adds a fully populated SerDe, fetches it back by name,
 * and verifies every field survived the metastore round trip.
 */
@Test
public void addSerde() throws TException {
    String serdeName = uniqueSerdeName();
    SerDeInfo created = new SerDeInfo(serdeName, "serdeLib", Collections.singletonMap("a", "b"));
    created.setSerializerClass("serializer");
    created.setDeserializerClass("deserializer");
    created.setDescription("description");
    created.setSerdeType(SerdeType.SCHEMA_REGISTRY);
    client.addSerDe(created);

    SerDeInfo fetched = client.getSerDe(serdeName);
    Assert.assertEquals(serdeName, fetched.getName());
    Assert.assertEquals("serdeLib", fetched.getSerializationLib());
    Assert.assertEquals(1, fetched.getParametersSize());
    Assert.assertEquals("b", fetched.getParameters().get("a"));
    Assert.assertEquals("serializer", fetched.getSerializerClass());
    Assert.assertEquals("deserializer", fetched.getDeserializerClass());
    Assert.assertEquals("description", fetched.getDescription());
    Assert.assertEquals(SerdeType.SCHEMA_REGISTRY, fetched.getSerdeType());
}
/**
 * Builds a test partition for the given table, copying the table's bucket,
 * sort, and SerDe settings and attaching both plain and excluded parameters.
 *
 * @param vals the partition values
 * @param table the table the partition belongs to
 * @return the constructed partition
 * @throws MetaException if the builder rejects the configuration
 */
private Partition createPartition(List<String> vals, Table table) throws MetaException {
    PartitionBuilder builder = new PartitionBuilder()
        .inTable(table)
        .setValues(vals);
    builder.addPartParam("key1", "S1");
    builder.addPartParam("key2", "S2");
    builder.addPartParam(EXCLUDE_KEY_PREFIX + "key1", "e1");
    builder.addPartParam(EXCLUDE_KEY_PREFIX + "key2", "e2");
    builder.setBucketCols(table.getSd().getBucketCols());
    builder.setSortCols(table.getSd().getSortCols());
    builder.setSerdeName(table.getSd().getSerdeInfo().getName());
    builder.setSerdeLib(table.getSd().getSerdeInfo().getSerializationLib());
    builder.setSerdeParams(table.getSd().getSerdeInfo().getParameters());
    return builder.build(conf);
}