/** update partition timeline cache for addition of time partition */ public void updateForAddition(String cubeTableName, String storageName, UpdatePeriod updatePeriod, Map<String, TreeSet<Date>> timePartSpec) throws HiveException, LensException { // fail fast. All part cols mentioned in all partitions should exist. for (String partCol : timePartSpec.keySet()) { getAndFailFast(cubeTableName, storageName, updatePeriod, partCol); } for (Map.Entry<String, TreeSet<Date>> entry : timePartSpec.entrySet()) { for (Date dt : entry.getValue()) { get(cubeTableName, storageName, updatePeriod, entry.getKey()).add(TimePartition.of(updatePeriod, dt)); } } }
/** update partition timeline cache for addition of time partition */ public void updateForAddition(String cubeTableName, String storageName, UpdatePeriod updatePeriod, Map<String, TreeSet<Date>> timePartSpec) throws HiveException, LensException { // fail fast. All part cols mentioned in all partitions should exist. for (String partCol : timePartSpec.keySet()) { getAndFailFast(cubeTableName, storageName, updatePeriod, partCol); } for (Map.Entry<String, TreeSet<Date>> entry : timePartSpec.entrySet()) { for (Date dt : entry.getValue()) { get(cubeTableName, storageName, updatePeriod, entry.getKey()).add(TimePartition.of(updatePeriod, dt)); } } }
.get(factName, storageName).entrySet()) { if (updatePeriod == null || entry.getKey().equals(updatePeriod)) { for (Map.Entry<String, PartitionTimeline> entry1 : entry.getValue().entrySet()) {
for (String storage : fact.getStorages()) { for (UpdatePeriod updatePeriod : fact.getUpdatePeriods().get(storage)) { PartitionTimeline timeline = partitionTimelineCache.get(fact.getSourceFactName(), storage, updatePeriod, partCol); if (timeline != null) {// this storage table is partitioned by partCol or not.
for (String storage : fact.getStorages()) { for (UpdatePeriod updatePeriod : fact.getUpdatePeriods().get(storage)) { PartitionTimeline timeline = partitionTimelineCache.get(fact.getSourceFactName(), storage, updatePeriod, partCol); if (timeline != null) {// this storage table is partitioned by partCol or not.
/** * get all timelines for all update periods and partition columns for the given fact-storage pair. If already loaded * in memory, it'll return that. If not, it'll first try to load it from table properties. If not found in table * properties, it'll get all partitions, compute timelines in memory, write back all loads timelines to table * properties for further usage and return them. * * @param fact fact * @param storage storage * @return all timelines for fact-storage pair. Load from properties/all partitions if needed. * @throws HiveException * @throws LensException */ public TreeMap<UpdatePeriod, CaseInsensitiveStringHashMap<PartitionTimeline>> get(String fact, String storage) throws HiveException, LensException { // SUSPEND CHECKSTYLE CHECK DoubleCheckedLockingCheck // Unique key for the timeline cache, based on storage and fact. String timeLineKey = (Storage.getPrefix(storage)+ fact).toLowerCase(); synchronized (this) { if (get(timeLineKey) == null) { loadTimeLines(fact, storage, timeLineKey); } log.debug("timeline for {} is: {}", storage, get(timeLineKey)); // return the final value from memory return get(timeLineKey); // RESUME CHECKSTYLE CHECK DoubleCheckedLockingCheck } }
/** * get all timelines for all update periods and partition columns for the given fact-storage pair. If already loaded * in memory, it'll return that. If not, it'll first try to load it from table properties. If not found in table * properties, it'll get all partitions, compute timelines in memory, write back all loads timelines to table * properties for further usage and return them. * * @param fact fact * @param storage storage * @return all timelines for fact-storage pair. Load from properties/all partitions if needed. * @throws HiveException * @throws LensException */ public TreeMap<UpdatePeriod, CaseInsensitiveStringHashMap<PartitionTimeline>> get(String fact, String storage) throws HiveException, LensException { // SUSPEND CHECKSTYLE CHECK DoubleCheckedLockingCheck // Unique key for the timeline cache, based on storage and fact. String timeLineKey = (Storage.getPrefix(storage)+ fact).toLowerCase(); synchronized (this) { if (get(timeLineKey) == null) { loadTimeLines(fact, storage, timeLineKey); } log.debug("timeline for {} is: {}", storage, get(timeLineKey)); // return the final value from memory return get(timeLineKey); // RESUME CHECKSTYLE CHECK DoubleCheckedLockingCheck } }
/**
 * Check whether no partitions exist for the given partition column across all update periods.
 *
 * @param fact    fact
 * @param storage storage
 * @param partCol part column
 * @return true if all the timelines for fact-storage table are empty for all valid update periods.
 * @throws HiveException on metastore errors while loading timelines
 * @throws LensException on lens errors while loading timelines
 */
public boolean noPartitionsExist(String fact, String storage, String partCol)
  throws HiveException, LensException {
  // Hoist the fact-storage lookup: the original called get(fact, storage) twice
  // (null check + keySet), and each call goes through a synchronized cache access.
  TreeMap<UpdatePeriod, CaseInsensitiveStringHashMap<PartitionTimeline>> timelines = get(fact, storage);
  if (timelines == null) {
    return true;
  }
  for (UpdatePeriod updatePeriod : timelines.keySet()) {
    PartitionTimeline timeline = get(fact, storage, updatePeriod, partCol);
    if (timeline != null && !timeline.isEmpty()) {
      return false;
    }
  }
  return true;
}
/**
 * store back all timelines of given storage to table properties
 *
 * @param timeLineKey      key for the time line
 * @param updatePeriod     update period whose timelines are written back
 * @param storageTableName Storage table name
 * @throws HiveException on metastore errors while reading or altering the table
 * @throws LensException on lens errors while updating table params
 */
private void alterTablePartitionCache(String timeLineKey, UpdatePeriod updatePeriod, String storageTableName)
  throws HiveException, LensException {
  Table table = getTable(storageTableName);
  // NOTE(review): assumes getParameters() returns the table's live parameter map —
  // the put below relies on mutating it before alterHiveTable persists it; confirm.
  Map<String, String> params = table.getParameters();
  if (partitionTimelineCache.get(timeLineKey) != null) {
    // write each timeline's state for this update period into the table's parameters
    for (Map.Entry<String, PartitionTimeline> entry : partitionTimelineCache.get(timeLineKey).get(updatePeriod)
      .entrySet()) {
      entry.getValue().updateTableParams(table);
    }
    // marker property declaring that timeline state is present in table params
    params.put(getPartitionTimelineCachePresenceKey(), "true");
    alterHiveTable(storageTableName, table);
  }
}
/**
 * Tell whether the given partition column has no partitions registered for any update period
 * of the fact-storage table.
 *
 * @param fact fact
 * @param storage storage
 * @param partCol part column
 * @return true if all the timelines for fact-storage table are empty for all valid update periods.
 * @throws HiveException
 * @throws LensException
 */
public boolean noPartitionsExist(String fact, String storage, String partCol)
  throws HiveException, LensException {
  if (get(fact, storage) != null) {
    // a single non-empty timeline is enough to prove partitions exist
    for (UpdatePeriod period : get(fact, storage).keySet()) {
      PartitionTimeline tl = get(fact, storage, period, partCol);
      if (tl != null && !tl.isEmpty()) {
        return false;
      }
    }
  }
  return true;
}
/**
 * store back all timelines of given storage to table properties
 *
 * @param timeLineKey key for the time line
 * @param storageTableName Storage table name
 * @throws HiveException
 */
private void alterTablePartitionCache(String timeLineKey, UpdatePeriod updatePeriod, String storageTableName)
  throws HiveException, LensException {
  Table storageTable = getTable(storageTableName);
  Map<String, String> tableParams = storageTable.getParameters();
  // nothing to persist if no timelines are cached under this key
  if (partitionTimelineCache.get(timeLineKey) == null) {
    return;
  }
  // push every timeline's state for this update period into the table object
  for (PartitionTimeline timeline : partitionTimelineCache.get(timeLineKey).get(updatePeriod).values()) {
    timeline.updateTableParams(storageTable);
  }
  // flag that the timeline cache is stored in the table params, then persist
  tableParams.put(getPartitionTimelineCachePresenceKey(), "true");
  alterHiveTable(storageTableName, storageTable);
}
/** update partition timeline cache for deletion of time partition */ public boolean updateForDeletion(String cubeTableName, String storageName, UpdatePeriod updatePeriod, Map<String, Date> timePartSpec) throws HiveException, LensException { // fail fast. All part cols mentioned in all partitions should exist. for (String partCol : timePartSpec.keySet()) { getAndFailFast(cubeTableName, storageName, updatePeriod, partCol); } boolean updated = false; for (Map.Entry<String, Date> entry : timePartSpec.entrySet()) { TimePartition part = TimePartition.of(updatePeriod, entry.getValue()); if (!partitionExistsByFilter(cubeTableName, storageName, updatePeriod, StorageConstants.getPartFilter(entry.getKey(), part.getDateString()))) { get(cubeTableName, storageName, updatePeriod, entry.getKey()).drop(part); updated = true; } } return updated; } }
/**
 * update partition timeline cache for deletion of time partition
 *
 * @return true if at least one time partition was dropped from a timeline
 */
public boolean updateForDeletion(String cubeTableName, String storageName, UpdatePeriod updatePeriod,
  Map<String, Date> timePartSpec) throws HiveException, LensException {
  // fail fast. All part cols mentioned in all partitions should exist.
  for (String partCol : timePartSpec.keySet()) {
    getAndFailFast(cubeTableName, storageName, updatePeriod, partCol);
  }
  boolean updated = false;
  for (Map.Entry<String, Date> entry : timePartSpec.entrySet()) {
    TimePartition part = TimePartition.of(updatePeriod, entry.getValue());
    // drop from the timeline only when no physical partition still matches the filter
    if (!partitionExistsByFilter(cubeTableName, storageName, updatePeriod,
      StorageConstants.getPartFilter(entry.getKey(), part.getDateString()))) {
      get(cubeTableName, storageName, updatePeriod, entry.getKey()).drop(part);
      updated = true;
    }
  }
  return updated;
}
}
/**
 * returns the timeline corresponding to fact-storage table, updatePeriod, partCol. throws exception if not
 * exists, which would most probably mean the combination is incorrect.
 */
public PartitionTimeline getAndFailFast(String fact, String storage, UpdatePeriod updatePeriod, String partCol)
  throws HiveException, LensException {
  PartitionTimeline timeline = get(fact, storage, updatePeriod, partCol);
  if (timeline != null) {
    return timeline;
  }
  // absent timeline => the fact/storage/period/column combination is invalid
  throw new LensException(LensCubeErrorCode.TIMELINE_ABSENT.getLensErrorInfo(), fact, storage, updatePeriod,
    partCol);
}
/**
 * returns the timeline corresponding to fact-storage table, updatePeriod, partCol. throws exception if not
 * exists, which would most probably mean the combination is incorrect.
 *
 * @return the timeline; never null
 * @throws LensException with TIMELINE_ABSENT when no timeline exists for the combination
 */
public PartitionTimeline getAndFailFast(String fact, String storage, UpdatePeriod updatePeriod, String partCol)
  throws HiveException, LensException {
  PartitionTimeline timeline = get(fact, storage, updatePeriod, partCol);
  if (timeline == null) {
    throw new LensException(LensCubeErrorCode.TIMELINE_ABSENT.getLensErrorInfo(), fact, storage, updatePeriod,
      partCol);
  }
  return timeline;
}
/**
 * Assert that, for each partition column, all given storages hold equal timelines for the
 * fact at the given update period.
 */
private void assertSameTimelines(String factName, String[] storages, UpdatePeriod updatePeriod, String[] partCols)
  throws HiveException, LensException {
  for (String column : partCols) {
    // collect this column's timeline from every storage, then compare them pairwise
    ArrayList<PartitionTimeline> collected = Lists.newArrayList();
    for (String storageName : storages) {
      collected.add(client.partitionTimelineCache.get(factName, storageName, updatePeriod, column));
    }
    TestPartitionTimelines.assertSameTimelines(collected);
  }
}
/**
 * returns the timeline corresponding to fact-storage table, updatePeriod, partCol. null if doesn't exist, which
 * would only happen if the combination is not valid/supported
 *
 * @param fact         fact name
 * @param storage      storage name
 * @param updatePeriod update period
 * @param partCol      partition column
 * @return the timeline, or null when the combination is not valid/supported
 */
public PartitionTimeline get(String fact, String storage, UpdatePeriod updatePeriod, String partCol)
  throws HiveException, LensException {
  // Hoist the lookups: the original nested ternary called get(fact, storage) three times
  // (each one a synchronized cache access that may lazily load) and the period map twice.
  TreeMap<UpdatePeriod, CaseInsensitiveStringHashMap<PartitionTimeline>> timelines = get(fact, storage);
  if (timelines == null) {
    return null;
  }
  CaseInsensitiveStringHashMap<PartitionTimeline> byPartCol = timelines.get(updatePeriod);
  return byPartCol == null ? null : byPartCol.get(partCol);
}
/**
 * check partition existence in the appropriate timeline if it exists
 *
 * @param name     fact/cube table name
 * @param storage  storage name
 * @param period   update period
 * @param partCol  partition column
 * @param partSpec partition date to check
 * @return true if a timeline exists for the combination and contains the given time partition
 */
public boolean partitionTimeExists(String name, String storage, UpdatePeriod period, String partCol, Date partSpec)
  throws HiveException, LensException {
  // Look the timeline up once instead of twice: each get() resolves the full
  // fact->storage->period->column chain through a synchronized cache access.
  PartitionTimeline timeline = get(name, storage, period, partCol);
  return timeline != null && timeline.exists(TimePartition.of(period, partSpec));
}
/**
 * returns the timeline corresponding to fact-storage table, updatePeriod, partCol. null if doesn't exist, which
 * would only happen if the combination is not valid/supported
 */
public PartitionTimeline get(String fact, String storage, UpdatePeriod updatePeriod, String partCol)
  throws HiveException, LensException {
  // walk the fact -> storage -> update period -> part column chain with guard clauses
  if (get(fact, storage) == null) {
    return null;
  }
  if (get(fact, storage).get(updatePeriod) == null) {
    return null;
  }
  return get(fact, storage).get(updatePeriod).get(partCol);
}
/** check partition existence in the appropriate timeline if it exists */
public boolean partitionTimeExists(String name, String storage, UpdatePeriod period, String partCol, Date partSpec)
  throws HiveException, LensException {
  // no timeline registered for this combination => the partition can't exist
  if (get(name, storage, period, partCol) == null) {
    return false;
  }
  return get(name, storage, period, partCol).exists(TimePartition.of(period, partSpec));
}