/**
 * Indicates whether this table is backed by a custom storage handler rather than
 * native Hive storage (i.e. the storage-handler class property is present).
 */
public boolean isNonNative() {
  String storageHandlerClass = getProperty(
      org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE);
  return storageHandlerClass != null;
}
/** A table is non-native when a storage-handler class is registered in its metadata. */
public boolean isNonNative() {
  return null != getProperty(
      org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE);
}
/**
 * Checks if a table is a valid ACID table.
 *
 * Note, users are responsible for using the correct TxnManager. We do not look at
 * SessionState.get().getTxnMgr().supportsAcid() here.
 *
 * @param table table to check; may be {@code null}
 * @return true if table is a legit ACID table, false otherwise
 */
public static boolean isAcidTable(Table table) {
  if (table == null) {
    return false;
  }
  String tableIsTransactional = table.getProperty(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL);
  if (tableIsTransactional == null) {
    // Some writers store the key upper-cased; use Locale.ROOT so the fallback key is
    // locale-independent.  The default toUpperCase() maps 'i' to a dotted capital I
    // under the Turkish locale, producing a key that would never match.
    tableIsTransactional = table.getProperty(
        hive_metastoreConstants.TABLE_IS_TRANSACTIONAL.toUpperCase(java.util.Locale.ROOT));
  }
  return tableIsTransactional != null && tableIsTransactional.equalsIgnoreCase("true");
}
/** Resolves this table's bucketing version from its metadata properties. */
public int getBucketingVersion() {
  String versionProperty = getProperty(hive_metastoreConstants.TABLE_BUCKETING_VERSION);
  return Utilities.getBucketingVersion(versionProperty);
}
/**
 * Computes the size of a table, delegating to the (conf, size, path) overload
 * with the table's stored "totalSize" stat and its filesystem path.
 */
protected long getSize(HiveConf conf, Table table) {
  String totalSize = table.getProperty("totalSize");
  return getSize(conf, totalSize, table.getPath());
}
/** * Returns the acidOperationalProperties for a given table. * @param table A table object * @return the acidOperationalProperties object for the corresponding table. */ public static AcidOperationalProperties getAcidOperationalProperties(Table table) { String transactionalProperties = table.getProperty( hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES); if (transactionalProperties == null) { // If the table does not define any transactional properties, we return a default type. return AcidOperationalProperties.getDefault(); } return AcidOperationalProperties.parseString(transactionalProperties); }
/** Returns the table size, preferring the stored "totalSize" stat over the path. */
protected long getSize(HiveConf conf, Table table) {
  final Path tablePath = table.getPath();
  final String statedSize = table.getProperty("totalSize");
  return getSize(conf, statedSize, tablePath);
}
/** * Returns the acidOperationalProperties for a given table. * @param table A table object * @return the acidOperationalProperties object for the corresponding table. */ public static AcidOperationalProperties getAcidOperationalProperties(Table table) { String transactionalProperties = table.getProperty( hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES); if (transactionalProperties == null) { // If the table does not define any transactional properties, we return a legacy type. return AcidOperationalProperties.getLegacy(); } return AcidOperationalProperties.parseString(transactionalProperties); }
/**
 * Lazily instantiates and caches the storage handler configured for this table.
 * Returns {@code null} for native tables (no storage-handler property set).
 *
 * @return the cached {@link HiveStorageHandler}, or {@code null} for native tables
 * @throws RuntimeException if the configured handler class cannot be loaded or instantiated
 */
public HiveStorageHandler getStorageHandler() {
  if (storageHandler != null || !isNonNative()) {
    return storageHandler;
  }
  String handlerClass = getProperty(
      org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE);
  try {
    storageHandler = HiveUtils.getStorageHandler(SessionState.getSessionConf(), handlerClass);
  } catch (Exception e) {
    // Name the offending handler class so misconfigured tables are diagnosable
    // from the exception message alone; the original cause is preserved.
    throw new RuntimeException("Failed to load storage handler: " + handlerClass, e);
  }
  return storageHandler;
}
/**
 * Lazily resolves and caches the storage handler for non-native tables;
 * returns {@code null} for native tables.
 */
public HiveStorageHandler getStorageHandler() {
  if (storageHandler == null && isNonNative()) {
    final String handlerClassName = getProperty(
        org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE);
    try {
      storageHandler =
          HiveUtils.getStorageHandler(SessionState.getSessionConf(), handlerClassName);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }
  return storageHandler;
}
// Copy the table-level "comment" property (may be null) onto the metadata record,
// then collect it.  NOTE(review): fragment of a larger loop — enclosing method not
// visible in this view.
tableMeta.setComments(table.getProperty("comment")); tableMetas.add(tableMeta);
// Attach the stored table comment (null when the table has none) and add the
// record to the result list.  NOTE(review): truncated fragment — surrounding
// loop/method lies outside this view.
tableMeta.setComments(table.getProperty("comment")); tableMetas.add(tableMeta);
// For each partition, hand its directory to Hive.trashFiles; when the table sets
// auto.purge=true the data is purged instead of moved to trash.
// NOTE(review): incomplete fragment — the loop body is truncated and the opening
// '{' is closed outside this view.
for (Partition partition : partitions) { final FileSystem newPathFileSystem = partition.getPartitionPath().getFileSystem(this.getConf()); boolean isAutoPurge = "true".equalsIgnoreCase(tbl.getProperty("auto.purge")); final FileStatus status = newPathFileSystem.getFileStatus(partition.getPartitionPath()); Hive.trashFiles(newPathFileSystem, new FileStatus[]{status}, this.getConf(), isAutoPurge);
/**
 * Wraps the given base committer and caches the job/table state needed at commit time.
 *
 * @param context current JobContext
 * @param baseCommitter OutputCommitter to contain
 * @throws IOException if the job info or storage handler cannot be loaded from the configuration
 */
public FileOutputCommitterContainer(JobContext context,
    org.apache.hadoop.mapred.OutputCommitter baseCommitter) throws IOException {
  super(context, baseCommitter);
  jobInfo = HCatOutputFormat.getJobInfo(context.getConfiguration());
  dynamicPartitioningUsed = jobInfo.isDynamicPartitioningUsed();
  // With static partitioning the partition set is already known up front.
  this.partitionsDiscovered = !dynamicPartitioningUsed;
  cachedStorageHandler = HCatUtil.getStorageHandler(context.getConfiguration(),
      jobInfo.getTableInfo().getStorerInfo());
  Table table = new Table(jobInfo.getTableInfo().getTable());
  // A custom dynamic-partition output location is honored only when all three hold:
  // dynamic partitioning is in use, the table is EXTERNAL, and a non-empty custom
  // path was supplied in the job info.
  if (dynamicPartitioningUsed
      && Boolean.parseBoolean((String)table.getProperty("EXTERNAL"))
      && jobInfo.getCustomDynamicPath() != null
      && jobInfo.getCustomDynamicPath().length() > 0) {
    customDynamicLocationUsed = true;
  } else {
    customDynamicLocationUsed = false;
  }
  // Cap on append attempts; falls back to the warn threshold when unset.
  this.maxAppendAttempts = context.getConfiguration().getInt(HCatConstants.HCAT_APPEND_LIMIT,
      APPEND_COUNTER_WARN_THRESHOLD);
}
// When SHOW TBLPROPERTIES names a specific property, look it up and build an error
// message if the table does not define it.
// NOTE(review): truncated fragment — the error handling and the else-branch are
// outside this view.
String propertyName = showTblPrpt.getPropertyName(); if (propertyName != null) { String propertyValue = tbl.getProperty(propertyName); if (propertyValue == null) { String errMsg = "Table " + tableName + " does not have property: " + propertyName;
// Move the new partition's directory to the trash — or purge it outright when the
// table sets auto.purge=true.  NOTE(review): truncated fragment — the catch/finally
// of this try block is outside this view.
try { final FileSystem newPathFileSystem = newTPart.getPartitionPath().getFileSystem(this.getConf()); boolean isAutoPurge = "true".equalsIgnoreCase(tbl.getProperty("auto.purge")); final FileStatus status = newPathFileSystem.getFileStatus(newTPart.getPartitionPath()); Hive.trashFiles(newPathFileSystem, new FileStatus[]{status}, this.getConf(), isAutoPurge);
// Read the materialized view's rewriting time window property; an empty/null value
// presumably falls through to a default mode — TODO confirm against the truncated
// else-branch.  NOTE(review): fragment cut mid-statement.
final String timeWindowString = mv.getProperty(MATERIALIZED_VIEW_REWRITING_TIME_WINDOW); final String mode; if (!org.apache.commons.lang.StringUtils.isEmpty(timeWindowString)) {
// NOTE(review): Long.parseLong returns a primitive long, so after autoboxing rowCnt
// can never be null here — the following null check looks unreachable.  parseLong
// also throws if the ROW_COUNT property is absent or non-numeric; confirm intended
// handling against the enclosing method (this is a truncated fragment).
return null; rowCnt = Long.parseLong(tbl.getProperty(StatsSetupConst.ROW_COUNT)); if (rowCnt == null) {
// NOTE(review): the null check on rowCnt appears dead — Long.parseLong yields a
// primitive, so the boxed result is never null; a missing/malformed ROW_COUNT
// property would instead throw from parseLong.  Verify against the full method
// (fragment is truncated at both ends).
return null; rowCnt = Long.parseLong(tbl.getProperty(StatsSetupConst.ROW_COUNT)); if (rowCnt == null) {
// Fetch the rewriting time window configured on the materialized view; the branch
// decides which mode applies when the property is non-empty.
// NOTE(review): truncated fragment — the body of the if and the else-branch are not
// visible here.
final String timeWindowString = mv.getProperty(MATERIALIZED_VIEW_REWRITING_TIME_WINDOW); final String mode; if (!org.apache.commons.lang.StringUtils.isEmpty(timeWindowString)) {