/**
 * Getter for SerDe parameters.
 * @return The SerDe parameters (the live map from the storage descriptor, not a copy).
 */
public Map<String, String> getSerdeParams() {
  SerDeInfo serdeInfo = this.sd.getSerdeInfo();
  return serdeInfo.getParameters();
}
/**
 * Sets (or overwrites) a single SerDe parameter on this table, lazily
 * initializing the parameter map if it has not been set yet.
 * @param paramName The parameter key.
 * @param value The parameter value.
 * @return This HCatTable, to allow call-chaining.
 */
public HCatTable serdeParam(String paramName, String value) {
  SerDeInfo serdeInfo = getSerDeInfo();
  Map<String, String> params = serdeInfo.getParameters();
  if (params == null) {
    params = new HashMap<String, String>();
    serdeInfo.setParameters(params);
  }
  params.put(paramName, value);
  return this;
}
/**
 * Returns SerDe parameters such as the field delimiter, etc.
 * @return The SerDe parameter map.
 */
public Map<String, String> getSerdeParams() {
  SerDeInfo serdeInfo = getSerDeInfo();
  return serdeInfo.getParameters();
}
/**
 * Sets a single SerDe parameter, initializing the parameter map if it is unset.
 * @param param The parameter key.
 * @param value The parameter value.
 * @return The previous value mapped to the key, or null if there was none.
 */
public String setSerdeParam(String param, String value) {
  SerDeInfo serdeInfo = getSerdeInfo();
  // Guard against an unset (null) parameter map, mirroring the lazy-init done
  // elsewhere when adding serde parameters; the previous code would NPE here.
  if (serdeInfo.getParameters() == null) {
    serdeInfo.setParameters(new HashMap<String, String>());
  }
  return serdeInfo.getParameters().put(param, value);
}
/**
 * Looks up a single SerDe parameter.
 * @param param The parameter key.
 * @return The parameter value, or null if the key is not present.
 */
public String getSerdeParam(String param) {
  Map<String, String> params = getSerdeInfo().getParameters();
  return params.get(param);
}
/**
 * Looks up a single SerDe parameter.
 * @param param The parameter key.
 * @return The parameter value, or null if the key is not present.
 */
public String getSerdeParam(String param) {
  Map<String, String> params = getSerdeInfo().getParameters();
  return params.get(param);
}
/**
 * Sets a single SerDe parameter, initializing the parameter map if it is unset.
 * @param param The parameter key.
 * @param value The parameter value.
 * @return The previous value mapped to the key, or null if there was none.
 */
public String setSerdeParam(String param, String value) {
  SerDeInfo serdeInfo = getSerdeInfo();
  // Guard against an unset (null) parameter map, mirroring the lazy-init done
  // elsewhere when adding serde parameters; the previous code would NPE here.
  if (serdeInfo.getParameters() == null) {
    serdeInfo.setParameters(new HashMap<String, String>());
  }
  return serdeInfo.getParameters().put(param, value);
}
/**
 * Removes every SerDe parameter from this table's storage descriptor.
 */
public void clearSerDeInfo() {
  Map<String, String> serdeParams = tTable.getSd().getSerdeInfo().getParameters();
  serdeParams.clear();
}
/**
 * Removes every SerDe parameter from this table's storage descriptor.
 */
public void clearSerDeInfo() {
  Map<String, String> serdeParams = tTable.getSd().getSerdeInfo().getParameters();
  serdeParams.clear();
}
static StorerInfo extractStorerInfo(StorageDescriptor sd, Map<String, String> properties) throws IOException { Properties hcatProperties = new Properties(); for (String key : properties.keySet()) { hcatProperties.put(key, properties.get(key)); } // also populate with StorageDescriptor->SerDe.Parameters for (Map.Entry<String, String> param : sd.getSerdeInfo().getParameters().entrySet()) { hcatProperties.put(param.getKey(), param.getValue()); } return new StorerInfo( sd.getInputFormat(), sd.getOutputFormat(), sd.getSerdeInfo().getSerializationLib(), properties.get(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE), hcatProperties); }
/**
 * Wraps a thrift SerDeInfo, copying its name, serialization library, and
 * parameter map into this wrapper's fields.
 * NOTE(review): {@code parameters} is the live map held by {@code serDeInfo},
 * not a defensive copy — mutations are visible through both references.
 * @param serDeInfo The SerDeInfo to wrap.
 */
public SerDeInfoWrapper(SerDeInfo serDeInfo) {
  this.serDeInfo = serDeInfo;
  this.name = serDeInfo.getName();
  this.serializationLib = serDeInfo.getSerializationLib();
  this.parameters = serDeInfo.getParameters();
}
private String getHBaseTableName(Table tbl) { // Give preference to TBLPROPERTIES over SERDEPROPERTIES // (really we should only use TBLPROPERTIES, so this is just // for backwards compatibility with the original specs). String tableName = tbl.getParameters().get(HBaseSerDe.HBASE_TABLE_NAME); if (tableName == null) { //convert to lower case in case we are getting from serde tableName = tbl.getSd().getSerdeInfo().getParameters().get(HBaseSerDe.HBASE_TABLE_NAME); //standardize to lower case if (tableName != null) { tableName = tableName.toLowerCase(); } } if (tableName == null) { tableName = (tbl.getDbName() + "." + tbl.getTableName()).toLowerCase(); if (tableName.startsWith(HBaseStorageHandler.DEFAULT_PREFIX)) { tableName = tableName.substring(HBaseStorageHandler.DEFAULT_PREFIX.length()); } } return tableName; }
/**
 * Populates the parameter maps of the given SerDes directly from the
 * SERDE_PARAMS metastore table via SQL, then normalizes null values.
 * @param SERDE_PARAMS The (quoted) name of the SERDE_PARAMS table.
 * @param convertMapNullsToEmptyStrings Whether null map values become "" instead of being dropped.
 * @param pm The JDO persistence manager used to run the query.
 * @param serdes SerDes keyed by SERDE_ID; parameters are added in place.
 * @param serdeIds Comma-separated SERDE_ID list spliced into the IN clause.
 * @throws MetaException On query failure (propagated from loopJoinOrderedResult).
 */
static void setSerdeParams(String SERDE_PARAMS, boolean convertMapNullsToEmptyStrings, PersistenceManager pm, TreeMap<Long, SerDeInfo> serdes, String serdeIds) throws MetaException {
  String queryText;
  // NOTE(review): serdeIds is concatenated into the SQL text rather than bound
  // as parameters — presumably it is an internally-generated numeric ID list;
  // verify callers never pass user-controlled text here.
  queryText = "select \"SERDE_ID\", \"PARAM_KEY\", \"PARAM_VALUE\" from " + SERDE_PARAMS + ""
      + " where \"SERDE_ID\" in (" + serdeIds + ") and \"PARAM_KEY\" is not null"
      + " order by \"SERDE_ID\" asc";
  // Each joined row (SERDE_ID, PARAM_KEY, PARAM_VALUE) is applied to the
  // matching SerDeInfo; CLOB values are unwrapped via extractSqlClob.
  loopJoinOrderedResult(pm, serdes, queryText, 0, new ApplyFunc<SerDeInfo>() {
    @Override
    public void apply(SerDeInfo t, Object[] fields) {
      t.putToParameters((String)fields[1], extractSqlClob(fields[2]));
    }});
  // Perform conversion of null map values
  for (SerDeInfo t : serdes.values()) {
    t.setParameters(MetaStoreServerUtils.trimMapNulls(t.getParameters(), convertMapNullsToEmptyStrings));
  }
}
/**
 * Converts a SerDeInfo's parameters into a State, recording the
 * serialization library (when set) under HiveConstants.SERDE_TYPE.
 * @param si The SerDe info to convert.
 * @return A State holding all serde parameters plus the serde type.
 */
private static State getSerDeProps(SerDeInfo si) {
  State serDeProps = new State();
  Map<String, String> params = si.getParameters();
  // Copy every serde parameter first; SERDE_TYPE is set afterwards so it
  // takes precedence over any identically-named parameter.
  for (Map.Entry<String, String> param : params.entrySet()) {
    serDeProps.setProp(param.getKey(), param.getValue());
  }
  if (si.isSetSerializationLib()) {
    serDeProps.setProp(HiveConstants.SERDE_TYPE, si.getSerializationLib());
  }
  return serDeProps;
}
/**
 * Copies a metastore StorageDescriptor into a Storage.Builder: storage
 * format, location, bucketing, skew flag, and serde parameters.
 * @param storageDescriptor The thrift storage descriptor (must carry SerDe info).
 * @param builder The builder to populate.
 * @param tablePartitionName Name used when deriving the bucket property.
 * @throws PrestoException with HIVE_INVALID_METADATA if SerDe info is missing.
 */
public static void fromMetastoreApiStorageDescriptor(StorageDescriptor storageDescriptor, Storage.Builder builder, String tablePartitionName) {
  SerDeInfo serdeInfo = storageDescriptor.getSerdeInfo();
  if (serdeInfo == null) {
    throw new PrestoException(HIVE_INVALID_METADATA, "Table storage descriptor is missing SerDe info");
  }
  // Skewed only when the skew info exists AND actually names skewed columns.
  boolean skewed = storageDescriptor.isSetSkewedInfo()
      && storageDescriptor.getSkewedInfo().isSetSkewedColNames()
      && !storageDescriptor.getSkewedInfo().getSkewedColNames().isEmpty();
  Map<String, String> serdeParameters = serdeInfo.getParameters();
  builder.setStorageFormat(StorageFormat.createNullable(serdeInfo.getSerializationLib(), storageDescriptor.getInputFormat(), storageDescriptor.getOutputFormat()))
      .setLocation(nullToEmpty(storageDescriptor.getLocation()))
      .setBucketProperty(HiveBucketProperty.fromStorageDescriptor(storageDescriptor, tablePartitionName))
      .setSkewed(skewed)
      .setSerdeParameters(serdeParameters == null ? ImmutableMap.of() : serdeParameters);
}
public HCatTable(String dbName, String tableName) { this.dbName = StringUtils.isBlank(dbName)? Warehouse.DEFAULT_DATABASE_NAME : dbName; this.tableName = tableName; this.sd = new StorageDescriptor(); this.sd.setInputFormat(DEFAULT_INPUT_FORMAT_CLASS); this.sd.setOutputFormat(DEFAULT_OUTPUT_FORMAT_CLASS); this.sd.setSerdeInfo(new SerDeInfo()); this.sd.getSerdeInfo().setSerializationLib(DEFAULT_SERDE_CLASS); this.sd.getSerdeInfo().setParameters(new HashMap<String, String>()); this.sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1"); // Default serialization format. }
/**
 * Thrift-style generic field accessor: returns the current value of the
 * requested field via the corresponding getter.
 * @param field The field enum identifying which value to read.
 * @return The field's value (may be null if the field is unset).
 * @throws IllegalStateException if the field enum value is not recognized.
 */
public Object getFieldValue(_Fields field) {
  switch (field) {
  case NAME:
    return getName();
  case SERIALIZATION_LIB:
    return getSerializationLib();
  case PARAMETERS:
    return getParameters();
  case DESCRIPTION:
    return getDescription();
  case SERIALIZER_CLASS:
    return getSerializerClass();
  case DESERIALIZER_CLASS:
    return getDeserializerClass();
  case SERDE_TYPE:
    return getSerdeType();
  }
  // Unreachable for a well-formed _Fields value; guards against enum drift.
  throw new IllegalStateException();
}
/**
 * Converts a thrift SerDeInfo into its JDO model counterpart (MSerDeInfo).
 * @param ms The thrift SerDeInfo; must not be null.
 * @return The equivalent MSerDeInfo.
 * @throws MetaException if the input is null.
 */
private MSerDeInfo convertToMSerDeInfo(SerDeInfo ms) throws MetaException {
  if (ms == null) {
    throw new MetaException("Invalid SerDeInfo object");
  }
  // An unset serde type maps to 0 in the model layer.
  int serdeType = (ms.getSerdeType() == null) ? 0 : ms.getSerdeType().getValue();
  return new MSerDeInfo(ms.getName(), ms.getSerializationLib(), ms.getParameters(),
      ms.getDescription(), ms.getSerializerClass(), ms.getDeserializerClass(), serdeType);
}
/**
 * Attaches a freshly-built storage descriptor to the given table: the
 * supplied columns, a LazySimpleSerDe named after the table (serialization
 * format "1"), single bucket, uncompressed, Hive input/output formats.
 * @param cols The table's columns.
 * @param tbl The table to receive the storage descriptor.
 */
private void addSd(ArrayList<FieldSchema> cols, Table tbl) {
  // Build the SerDe first, then the descriptor that owns it.
  SerDeInfo serdeInfo = new SerDeInfo();
  serdeInfo.setName(tbl.getTableName());
  serdeInfo.setSerializationLib(LazySimpleSerDe.class.getName());
  HashMap<String, String> serdeParams = new HashMap<String, String>();
  serdeParams.put(serdeConstants.SERIALIZATION_FORMAT, "1");
  serdeInfo.setParameters(serdeParams);

  StorageDescriptor sd = new StorageDescriptor();
  sd.setCols(cols);
  sd.setCompressed(false);
  sd.setNumBuckets(1);
  sd.setParameters(new HashMap<String, String>());
  sd.setBucketCols(new ArrayList<String>());
  sd.setSortCols(new ArrayList<Order>());
  sd.setSerdeInfo(serdeInfo);
  sd.setInputFormat(HiveInputFormat.class.getName());
  sd.setOutputFormat(HiveOutputFormat.class.getName());
  tbl.setSd(sd);
}
/**
 * Builds a test partition of the given table with fixed parameters
 * (key1/key2 plus two EXCLUDE_KEY_PREFIX-ed variants), inheriting the
 * table's bucket/sort columns and SerDe settings.
 * @param vals The partition values.
 * @param table The parent table.
 * @return The constructed Partition.
 * @throws MetaException propagated from the builder.
 */
private Partition createPartition(List<String> vals, Table table) throws MetaException {
  StorageDescriptor sd = table.getSd();
  PartitionBuilder partitionBuilder = new PartitionBuilder().inTable(table).setValues(vals);
  partitionBuilder.addPartParam("key1", "S1");
  partitionBuilder.addPartParam("key2", "S2");
  partitionBuilder.addPartParam(EXCLUDE_KEY_PREFIX + "key1", "e1");
  partitionBuilder.addPartParam(EXCLUDE_KEY_PREFIX + "key2", "e2");
  partitionBuilder.setBucketCols(sd.getBucketCols());
  partitionBuilder.setSortCols(sd.getSortCols());
  partitionBuilder.setSerdeName(sd.getSerdeInfo().getName());
  partitionBuilder.setSerdeLib(sd.getSerdeInfo().getSerializationLib());
  partitionBuilder.setSerdeParams(sd.getSerdeInfo().getParameters());
  return partitionBuilder.build(conf);
}