/** Returns the URN identifying this dataset: the table's complete (db-qualified) name. */
@Override
public String datasetURN() {
  return table.getCompleteName();
}
/**
 * Computes the key used to identify {@code table}: its complete name.
 *
 * @param table the table to derive a key for
 * @return the table's complete name
 */
@VisibleForTesting
public static String tableKey(Table table) {
  return table.getCompleteName();
}
/**
 * Looks up the previously recorded high watermark for {@code table}.
 *
 * @param table the table whose watermark to look up (keyed by complete name)
 * @return the stored watermark, or a zero {@code LongWatermark} when none is recorded
 */
@Override
public LongWatermark getPreviousHighWatermark(Table table) {
  // Single map lookup instead of containsKey() + get(); a (never expected) null
  // mapping now also falls back to the zero watermark.
  LongWatermark watermark = this.tableWatermarks.get(table.getCompleteName());
  return watermark != null ? watermark : new LongWatermark(0);
}
// Validation task: runs the row-count and data-validation queries against Hive
// and feeds both result sets into the validation report for this dataset.
@Override
public Void call() throws Exception {
  // Execute validation queries
  log.debug(String.format("Going to execute queries: %s for format: %s", validationQueries, format));
  List<Long> rowCounts = ValidationJob.this.getValidationOutputFromHive(validationQueries);
  log.debug(String.format("Going to execute queries: %s for format: %s", dataValidationQueries, format));
  List<Long> rowDataValidatedCount = ValidationJob.this.getValidationOutputFromHive(dataValidationQueries);
  // Validate and populate report
  validateAndPopulateReport(hiveDataset.getTable().getCompleteName(), updateTime, rowCounts, rowDataValidatedCount);
  return null;
} }));
/**
 * Decides whether this table's type is eligible for copying.
 * Logs a warning and answers {@code false} for non-copyable table types.
 */
private boolean canCopyTable() {
  if (COPYABLE_TABLES.contains(this.table.getTableType())) {
    return true;
  }
  log.warn(String.format("Not copying %s: tables of type %s are not copyable.",
      this.table.getCompleteName(), this.table.getTableType()));
  return false;
}
}
/**
 * Builds one copy file set per source partition, then — if any target-side
 * partitions remain unmatched — appends a deregister file set for them.
 *
 * @param partitionMap source partitions keyed by partition values
 * @return file sets covering every source partition plus any needed deregistrations
 */
private List<FileSet<CopyEntity>> generateAllFileSets(Map<List<String>, Partition> partitionMap) {
  List<FileSet<CopyEntity>> allFileSets = Lists.newArrayList();
  for (Map.Entry<List<String>, Partition> entry : partitionMap.entrySet()) {
    allFileSets.add(fileSetForPartition(entry.getValue()));
    // Whatever is left in targetPartitions afterwards exists only on the target side.
    HiveCopyEntityHelper.this.targetPartitions.remove(entry.getKey());
  }
  if (!HiveCopyEntityHelper.this.targetPartitions.isEmpty()) {
    allFileSets.add(new HivePartitionsDeregisterFileSet(
        HiveCopyEntityHelper.this.dataset.getTable().getCompleteName() + DEREGISTER_FILE_SET,
        HiveCopyEntityHelper.this.dataset,
        HiveCopyEntityHelper.this.targetPartitions.values(),
        HiveCopyEntityHelper.this));
  }
  return allFileSets;
}
// Maps a collector to a "completeTableName#partishType" identifier string.
@Override
public String apply(FooterStatCollector sc) {
  String tableName = sc.partish.getTable().getCompleteName();
  return tableName + "#" + sc.partish.getPartishType();
} };
/**
 * Renders the user-level EXPLAIN summary line for this table scan:
 * complete table name, alias, an optional ACID marker, and the
 * basic/column statistics states.
 */
@Explain(explainLevels = { Level.USER })
public String getTbl() {
  // StringBuilder instead of StringBuffer: the builder is method-local,
  // so the synchronization StringBuffer provides is pure overhead.
  StringBuilder sb = new StringBuilder();
  sb.append(this.tableMetadata.getCompleteName());
  sb.append(',').append(alias);
  if (isAcidTable()) {
    sb.append(", ACID table");
  }
  sb.append(",Tbl:").append(this.statistics.getBasicStatsState());
  sb.append(",Col:").append(this.statistics.getColumnStatsState());
  return sb.toString();
}
/**
 * @return the fully qualified name, including the db name: the enclosing
 *         table's complete name joined to this name with {@code '@'}
 */
public String getCompleteName() {
  String qualifiedTable = getTable().getCompleteName();
  return qualifiedTable + "@" + getName();
}
/**
 * @return the complete table name, including the db name
 */
public String getCompleteName() {
  // Delegate to the static helper that joins db and table names.
  String db = getDbName();
  String tbl = getTableName();
  return getCompleteName(db, tbl);
}
/**
 * Builds the user-level EXPLAIN summary for this table scan: complete table
 * name, alias, full-ACID/transactional markers, and the basic/column
 * statistics states.
 */
@Explain(explainLevels = { Level.USER })
public String getTbl() {
  StringBuilder summary = new StringBuilder(this.tableMetadata.getCompleteName());
  summary.append(',').append(alias);
  if (AcidUtils.isFullAcidTable(tableMetadata)) {
    summary.append(", ACID table");
  } else if (isTranscationalTable()) {
    summary.append(", transactional table");
  }
  summary.append(",Tbl:").append(this.statistics.getBasicStatsState());
  summary.append(",Col:").append(this.statistics.getColumnStatsState());
  return summary.toString();
}
/**
 * @return the fully qualified name, including the db name, in the form
 *         {@code <tableCompleteName>@<name>}
 */
public String getCompleteName() {
  return String.format("%s@%s", getTable().getCompleteName(), getName());
}
/**
 * @return the complete table name, including the db name
 */
public String getCompleteName() {
  final String dbName = getDbName();
  final String tableName = getTableName();
  // The static overload performs the actual db@table join.
  return getCompleteName(dbName, tableName);
}
/**
 * Creates a mock {@link Table} whose complete name is {@code "db@" + name}.
 *
 * @param name the bare table name to embed in the mocked complete name
 */
private static Table mockTable(String name) {
  Table mock = Mockito.mock(Table.class, Mockito.RETURNS_SMART_NULLS);
  Mockito.when(mock.getCompleteName()).thenReturn("db@" + name);
  return mock;
}
/**
 * Creates a mock {@link Table} whose complete name is exactly {@code name}.
 *
 * @param name the value the mock reports from {@code getCompleteName()}
 */
private static Table mockTable(String name) {
  Table mocked = Mockito.mock(Table.class, Mockito.RETURNS_SMART_NULLS);
  Mockito.when(mocked.getCompleteName()).thenReturn(name);
  return mocked;
}
/**
 * Stores the serialized {@link HiveDataset} in this state and automatically
 * sets the dataset urn by calling {@link #setDatasetUrn(String)}.
 */
public void setHiveDataset(HiveDataset hiveDataset) {
  String serializedDataset = HiveSource.GENERICS_AWARE_GSON.toJson(hiveDataset, HiveDataset.class);
  this.setProp(HIVE_DATASET_SERIALIZED_KEY, serializedDataset);
  setDatasetUrn(hiveDataset.getTable().getCompleteName());
}
/**
 * For updates, we need to set the column access info so that it contains information on
 * the columns we are updating.
 * (But not all the columns of the target table even though the rewritten query writes
 * all columns of target table since that is an implementation detail.)
 *
 * @param mTable  the target table of the update
 * @param setCols the columns assigned in the SET clause, keyed by column name
 */
protected void setUpAccessControlInfoForUpdate(Table mTable, Map<String, ASTNode> setCols) {
  ColumnAccessInfo cai = new ColumnAccessInfo();
  // The qualified table name is loop-invariant; compute it once instead of per column.
  String completeName = Table.getCompleteName(mTable.getDbName(), mTable.getTableName());
  for (String colName : setCols.keySet()) {
    cai.add(completeName, colName);
  }
  setUpdateColumnAccessInfo(cai);
}
/**
 * Currently updated the {@link #HIVE_TABLE_AVRO_SCHEMA_URL} location for new hive table.
 * No-op when the table is not Avro-backed.
 *
 * @param targetTable new Table to be registered in hive
 * @throws IOException if updating the Avro schema URL fails
 */
public static void updateTableAttributesIfAvro(Table targetTable, HiveCopyEntityHelper hiveHelper)
    throws IOException {
  // Guard clause: only Avro tables carry a schema URL to rewrite.
  if (!isHiveTableAvroType(targetTable)) {
    return;
  }
  updateAvroSchemaURL(targetTable.getCompleteName(), targetTable.getTTable().getSd(), hiveHelper);
}
/**
 * For updates, we need to set the column access info so that it contains information on
 * the columns we are updating.
 * (But not all the columns of the target table even though the rewritten query writes
 * all columns of target table since that is an implementation detail)
 *
 * @param mTable  the target table of the update
 * @param setCols the columns assigned in the SET clause, keyed by column name
 */
private void setUpAccessControlInfoForUpdate(Table mTable, Map<String, ASTNode> setCols) {
  ColumnAccessInfo cai = new ColumnAccessInfo();
  for (String colName : setCols.keySet()) {
    // Record each updated column against the fully qualified (db@table) name.
    cai.add(Table.getCompleteName(mTable.getDbName(), mTable.getTableName()), colName);
  }
  setUpdateColumnAccessInfo(cai);
} /**
/**
 * Set SLA event metadata in the workunit. The publisher will use this metadata to publish sla events.
 *
 * @param state                 the workunit to decorate with SLA event properties
 * @param table                 the table this workunit covers; its complete name becomes the partition key
 * @param updateTime            upstream update timestamp, in milliseconds
 * @param lowWatermark          previous publish timestamp (low watermark), in milliseconds
 * @param beginGetWorkunitsTime timestamp when workunit generation began
 */
public static void setTableSlaEventMetadata(WorkUnit state, Table table, long updateTime,
    long lowWatermark, long beginGetWorkunitsTime) {
  state.setProp(SlaEventKeys.DATASET_URN_KEY, state.getProp(ConfigurationKeys.DATASET_URN_KEY));
  state.setProp(SlaEventKeys.PARTITION_KEY, table.getCompleteName());
  state.setProp(SlaEventKeys.UPSTREAM_TS_IN_MILLI_SECS_KEY, String.valueOf(updateTime));
  // Time when the workunit was created
  state.setProp(SlaEventKeys.ORIGIN_TS_IN_MILLI_SECS_KEY, System.currentTimeMillis());
  // WORK_UNIT_CREATE_TIME deliberately re-reads the origin timestamp just set above.
  state.setProp(EventConstants.WORK_UNIT_CREATE_TIME, state.getProp(SlaEventKeys.ORIGIN_TS_IN_MILLI_SECS_KEY));
  state.setProp(EventConstants.BEGIN_GET_WORKUNITS_TIME, beginGetWorkunitsTime);
  state.setProp(SlaEventKeys.PREVIOUS_PUBLISH_TS_IN_MILLI_SECS_KEY, lowWatermark);
}