private void loadTimelinesFromAllPartitions(String storageTableName, String timeLineKey) throws HiveException, LensException { // Then add all existing partitions for batch addition in respective timelines. Table storageTable = getTable(storageTableName); List<String> timeParts = getTimePartColNamesOfTable(storageTable); List<FieldSchema> partCols = storageTable.getPartCols(); for (Partition partition : getPartitionsByFilter(storageTableName, null)) { UpdatePeriod period = deduceUpdatePeriod(partition); List<String> values = partition.getValues(); if (values.contains(StorageConstants.LATEST_PARTITION_VALUE)) { log.info("dropping latest partition from fact storage table: {}. Spec: {}", storageTableName, partition.getSpec()); getClient().dropPartition(storageTableName, values, false); continue; } for (int i = 0; i < partCols.size(); i++) { if (timeParts.contains(partCols.get(i).getName())) { addForBatchAddition(timeLineKey, storageTableName, period, partCols.get(i).getName(), values.get(i)); } } } }
/**
 * Registers all existing partitions of the given storage table for batch addition into
 * the timeline identified by {@code timeLineKey}. Partitions carrying the special
 * {@code StorageConstants.LATEST_PARTITION_VALUE} are dropped instead of being loaded.
 *
 * @param storageTableName name of the fact storage table whose partitions are scanned
 * @param timeLineKey      key identifying the timeline the partition values are added to
 * @throws HiveException propagated from Hive metastore access
 * @throws LensException propagated from timeline/batch-addition processing
 */
private void loadTimelinesFromAllPartitions(String storageTableName, String timeLineKey) throws HiveException, LensException {
  // Then add all existing partitions for batch addition in respective timelines.
  Table storageTable = getTable(storageTableName);
  List<String> timeParts = getTimePartColNamesOfTable(storageTable);
  List<FieldSchema> partCols = storageTable.getPartCols();
  // null filter => fetch every partition of the table
  for (Partition partition : getPartitionsByFilter(storageTableName, null)) {
    UpdatePeriod period = deduceUpdatePeriod(partition);
    List<String> values = partition.getValues();
    if (values.contains(StorageConstants.LATEST_PARTITION_VALUE)) {
      // The "latest" marker partition is not loaded into the timeline; it is dropped.
      // NOTE(review): the third argument (false) presumably controls data deletion —
      // TODO confirm against dropPartition's contract.
      log.info("dropping latest partition from fact storage table: {}. Spec: {}", storageTableName, partition.getSpec());
      getClient().dropPartition(storageTableName, values, false);
      continue;
    }
    // Partition values align positionally with the table's partition columns;
    // only time-partition columns are queued for batch addition.
    for (int i = 0; i < partCols.size(); i++) {
      if (timeParts.contains(partCols.get(i).getName())) {
        addForBatchAddition(timeLineKey, storageTableName, period, partCols.get(i).getName(), values.get(i));
      }
    }
  }
}