/**
 * Returns the parameter map of the wrapped Thrift table object.
 *
 * @return the table parameters
 * @see org.apache.hadoop.hive.metastore.api.Table#getParameters()
 */
public Map<String, String> getParameters() {
  return tTable.getParameters();
}
/**
 * Reports whether the table is backed by a storage handler (i.e. is not a
 * native Hive table). A null table or a table without parameters is treated
 * as native.
 *
 * @param table the table to inspect; may be null
 * @return true iff the META_TABLE_STORAGE parameter is set
 */
public static boolean isNonNativeTable(Table table) {
  if (table == null || table.getParameters() == null) {
    return false;
  }
  // A storage-handler class registered under META_TABLE_STORAGE marks the
  // table as non-native.
  return table.getParameters().get(hive_metastoreConstants.META_TABLE_STORAGE) != null;
}
/**
 * Determines whether the table was written by Presto as a view, indicated
 * by its PRESTO_VIEW_FLAG parameter being the string "true".
 */
private static boolean isPrestoView(Table table) {
  String viewFlag = table.getParameters().get(PRESTO_VIEW_FLAG);
  return "true".equals(viewFlag);
}
/**
 * When a table is marked transactional=true but transactional_properties is
 * not set, transactional_properties should take on the default value. Making
 * this explicit in the table definition is easier than checking everywhere
 * whether it is set or not.
 */
private void normalizeTransactionalPropertyDefault(Table table) {
  table.getParameters().put(
      hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES,
      DEFAULT_TRANSACTIONAL_PROPERTY);
}
/**
/**
 * Reports whether the table explicitly disables automatic compaction via
 * the "no_auto_compact" parameter. The key is checked in its canonical
 * lower-case form first and, failing that, in upper case.
 */
private boolean noAutoCompactSet(Table t) {
  String value = t.getParameters().get(hive_metastoreConstants.TABLE_NO_AUTO_COMPACT);
  if (value == null) {
    // Some writers historically stored the key in upper case; honor that too.
    value = t.getParameters().get(hive_metastoreConstants.TABLE_NO_AUTO_COMPACT.toUpperCase());
  }
  return value != null && value.equalsIgnoreCase("true");
}
}
/**
 * Reports whether partition-spec grouping is enabled on the table, i.e. the
 * "hive.hcatalog.partition.spec.grouping.enabled" parameter equals "true"
 * (case-insensitively).
 *
 * @param table the table whose parameters are inspected
 * @return true iff the grouping parameter is set to "true"
 */
private static boolean is_partition_spec_grouping_enabled(Table table) {
  // "true".equalsIgnoreCase(null) is simply false, so this single lookup
  // replaces the previous containsKey+get pair, which performed two lookups
  // and threw NPE when the key was present but mapped to a null value.
  return "true".equalsIgnoreCase(
      table.getParameters().get("hive.hcatalog.partition.spec.grouping.enabled"));
}
/**
 * Should produce the same result as
 * {@link org.apache.hadoop.hive.ql.io.AcidUtils#isAcidTable(org.apache.hadoop.hive.ql.metadata.Table)}.
 */
public static boolean isAcidTable(Table table) {
  if (!TxnUtils.isTransactionalTable(table)) {
    return false;
  }
  // Full ACID tables carry the default transactional_properties value.
  String txnProperties =
      table.getParameters().get(hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES);
  return TransactionalValidationListener.DEFAULT_TRANSACTIONAL_PROPERTY.equals(txnProperties);
}
/**
 * Decides whether dropped table data must bypass the trash ("purge").
 * Returns true when either the PURGE option was supplied via the environment
 * context ("ifPurge" property) or the table itself sets auto.purge=true.
 * NOTE(review): the numbered conditions below describe the caller's overall
 * skip-trash decision; only 3.1/3.2 are actually evaluated in this method —
 * deleteData and the external-table check happen at the call site. Confirm
 * against the caller before relying on this comment.
 */
private static boolean isMustPurge(EnvironmentContext envContext, Table tbl) {
  // Data needs deletion. Check if trash may be skipped.
  // Trash may be skipped iff:
  //  1. deleteData == true, obviously.
  //  2. tbl is external.
  //  3. Either
  //    3.1. User has specified PURGE from the commandline, and if not,
  //    3.2. User has set the table to auto-purge.
  return ((envContext != null) && Boolean.parseBoolean(envContext.getProperties().get("ifPurge")))
      || (tbl.isSetParameters() && "true".equalsIgnoreCase(tbl.getParameters().get("auto.purge")));
}
private void deleteParentRecursive(Path parent, int depth, boolean mustPurge, boolean needRecycle)
/**
 * Reports whether the table uses the Avro SerDe and supplies its schema via
 * an external URL (the avro.schema.url table property is set and non-empty).
 */
public static boolean isAvroTableWithExternalSchema(Table table) {
  String serdeLib = table.getSd().getSerdeInfo().getSerializationLib();
  if (!serdeLib.equals(AVRO_SERDE_CLASSNAME)) {
    return false;
  }
  String schemaUrl = table.getParameters().get(AVRO_SCHEMA_URL_PROPERTY);
  return schemaUrl != null && !schemaUrl.isEmpty();
}
/**
 * Should produce the same result as
 * {@link org.apache.hadoop.hive.metastore.txn.TxnUtils#isAcidTable(org.apache.hadoop.hive.metastore.api.Table)}.
 * A full-ACID table is transactional and not insert-only.
 */
public static boolean isFullAcidTable(org.apache.hadoop.hive.metastore.api.Table table) {
  if (!isTransactionalTable(table)) {
    return false;
  }
  return !isInsertOnlyTable(table.getParameters());
}
/**
 * Reports whether the table is an Avro table whose schema URL parameter is
 * set. A missing parameter map yields false; a missing storage descriptor or
 * SerDe info is treated as invalid metadata.
 *
 * @throws PrestoException if the storage descriptor or its SerDe info is absent
 */
public static boolean isAvroTableWithSchemaSet(org.apache.hadoop.hive.metastore.api.Table table) {
  if (table.getParameters() == null) {
    return false;
  }
  StorageDescriptor sd = table.getSd();
  if (sd == null) {
    throw new PrestoException(HIVE_INVALID_METADATA, "Table does not contain a storage descriptor: " + table);
  }
  SerDeInfo serde = sd.getSerdeInfo();
  if (serde == null) {
    throw new PrestoException(HIVE_INVALID_METADATA, "Table storage descriptor is missing SerDe info");
  }
  String serializationLib = serde.getSerializationLib();
  return serializationLib != null
      && serializationLib.equals(AVRO.getSerDe())
      && table.getParameters().get(AVRO_SCHEMA_URL_KEY) != null;
}
/**
 * Loads the table-level statistics: basic statistics from the table
 * parameters plus per-column statistics for every data column.
 *
 * @throws TableNotFoundException if the table does not exist
 */
@Override
public PartitionStatistics getTableStatistics(String databaseName, String tableName) {
  Table table = getTable(databaseName, tableName)
      .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
  // Column stats are keyed by the data (non-partition) column names.
  List<String> columnNames = table.getSd().getCols().stream()
      .map(FieldSchema::getName)
      .collect(toImmutableList());
  HiveBasicStatistics basics = getHiveBasicStatistics(table.getParameters());
  Map<String, HiveColumnStatistics> columnStats =
      getTableColumnStatistics(databaseName, tableName, columnNames, basics.getRowCount());
  return new PartitionStatistics(basics, columnStats);
}
/**
 * Atomically (method is synchronized) applies an update function to the
 * table's statistics: the basic statistics are written back into the table
 * parameters via alter_table, new/changed column statistics are stored, and
 * column statistics dropped by the update are deleted.
 *
 * @param update maps the current statistics to the desired statistics
 * @throws TableNotFoundException if the table does not exist
 */
@Override
public synchronized void updateTableStatistics(String databaseName, String tableName, Function<PartitionStatistics, PartitionStatistics> update)
{
    PartitionStatistics currentStatistics = getTableStatistics(databaseName, tableName);
    PartitionStatistics updatedStatistics = update.apply(currentStatistics);
    Table originalTable = getTable(databaseName, tableName)
            .orElseThrow(() -> new TableNotFoundException(new SchemaTableName(databaseName, tableName)));
    // Mutate a deep copy so the original (possibly cached) object is untouched.
    Table modifiedTable = originalTable.deepCopy();
    HiveBasicStatistics basicStatistics = updatedStatistics.getBasicStatistics();
    modifiedTable.setParameters(updateStatisticsParameters(modifiedTable.getParameters(), basicStatistics));
    // Persist the basic statistics (stored in the table parameters) first.
    alterTable(databaseName, tableName, modifiedTable);
    com.facebook.presto.hive.metastore.Table table = fromMetastoreApiTable(modifiedTable);
    OptionalLong rowCount = basicStatistics.getRowCount();
    // Convert each updated column statistic into the metastore representation;
    // the column type is looked up on the just-converted table.
    List<ColumnStatisticsObj> metastoreColumnStatistics = updatedStatistics.getColumnStatistics().entrySet().stream()
            .map(entry -> createMetastoreColumnStatistics(entry.getKey(), table.getColumn(entry.getKey()).get().getType(), entry.getValue(), rowCount))
            .collect(toImmutableList());
    if (!metastoreColumnStatistics.isEmpty()) {
        setTableColumnStatistics(databaseName, tableName, metastoreColumnStatistics);
    }
    // Columns present before the update but absent afterwards lose their stats.
    Set<String> removedColumnStatistics = difference(currentStatistics.getColumnStatistics().keySet(), updatedStatistics.getColumnStatistics().keySet());
    removedColumnStatistics.forEach(column -> deleteTableColumnStatistics(databaseName, tableName, column));
}
public static String createTableObjJson(Table tableObj) throws TException { //Note: The parameters of the Table object will be removed in the filter if it matches // any pattern provided through EVENT_NOTIFICATION_PARAMETERS_EXCLUDE_PATTERNS filterMapkeys(tableObj.getParameters(), paramsFilter); TSerializer serializer = new TSerializer(new TJSONProtocol.Factory()); return serializer.toString(tableObj, "UTF-8"); }
/**
 * Builds the combined schema Properties for a partition, merging the
 * partition's storage descriptor with the owning table's descriptor,
 * parameters, identity, and partition keys.
 */
public static Properties getSchema(
    org.apache.hadoop.hive.metastore.api.Partition part,
    org.apache.hadoop.hive.metastore.api.Table table) {
  return MetaStoreUtils.getSchema(
      part.getSd(),
      table.getSd(),
      table.getParameters(),
      table.getDbName(),
      table.getTableName(),
      table.getPartitionKeys());
}
/**
 * Fetches the table from the metastore and asserts whether its statistics
 * for the given columns are (or are not) up to date.
 */
private void verifyStatsUpToDate(String tbl, List<String> cols, IMetaStoreClient msClient,
    boolean isUpToDate) throws Exception {
  Table fetched = msClient.getTable(ss.getCurrentDatabase(), tbl);
  verifyStatsUpToDate(fetched.getParameters(), cols, isUpToDate);
}
/**
 * Sets the stats-autoupdate skip property on the table to the given value
 * and persists the change via alter_table.
 */
private void setTableSkipProperty(IMetaStoreClient msClient, String tbl, String val)
    throws Exception {
  Table target = msClient.getTable(ss.getCurrentDatabase(), tbl);
  target.getParameters().put(StatsUpdaterThread.SKIP_STATS_AUTOUPDATE_PROPERTY, val);
  msClient.alter_table(target.getDbName(), target.getTableName(), target);
}
/**
 * Altering a table through a nonexistent catalog must fail with
 * InvalidOperationException.
 */
@Test(expected = InvalidOperationException.class)
public void alterTableBogusCatalog() throws TException {
  Table copy = testTables[0].deepCopy();
  copy.getParameters().put("a", "b");
  client.alter_table("nosuch", copy.getDbName(), copy.getTableName(), copy);
}
/**
 * Catalog-aware variant: fetches the table using the current catalog and the
 * supplied valid-write-id list, then asserts the freshness of its statistics
 * for the given columns.
 */
private void verifyStatsUpToDate(String tbl, List<String> cols, IMetaStoreClient msClient,
    String validWriteIds, boolean isUpToDate) throws Exception {
  Table fetched =
      msClient.getTable(ss.getCurrentCatalog(), ss.getCurrentDatabase(), tbl, validWriteIds);
  verifyStatsUpToDate(fetched.getParameters(), cols, isUpToDate);
}
/**
 * Creates a test table with the given comment in the metastore and returns
 * the TableMeta the test expects the metastore to report for it (catalog is
 * always "hive").
 */
private TableMeta createTestTable(String dbName, String tableName, TableType type, String comment)
    throws Exception {
  Table newTable = createTable(dbName, tableName, type);
  newTable.getParameters().put("comment", comment);
  client.createTable(newTable);
  TableMeta expected = new TableMeta(dbName, tableName, type.name());
  expected.setComments(comment);
  expected.setCatName("hive");
  return expected;
}