/**
 * Extracts timeline implementation class from table params and instantiates it with other arguments, also in table
 * params.
 *
 * @param table hive table whose parameters carry the serialized timeline state
 * @throws LensException if the timeline cannot be initialized from the extracted properties
 * @see #updateTableParams(org.apache.hadoop.hive.ql.metadata.Table)
 */
public void init(Table table) throws LensException {
  // Program to the interface: this local only needs Map semantics, not HashMap specifics.
  Map<String, String> props = Maps.newHashMap();
  String prefix = MetastoreUtil.getPartitionInfoKeyPrefix(getUpdatePeriod(), getPartCol());
  // Collect only the params belonging to this (update period, partition column) pair,
  // stripping the prefix so initFromProperties sees bare property keys.
  for (Map.Entry<String, String> entry : table.getParameters().entrySet()) {
    if (entry.getKey().startsWith(prefix)) {
      props.put(entry.getKey().substring(prefix.length()), entry.getValue());
    }
  }
  log.info("initializing timeline from table properties: {},{},{}", getStorageTableName(), getUpdatePeriod(),
    getPartCol());
  initFromProperties(props);
  log.info("initialized to: {}", this);
}
/**
 * Sets PartitionTimeline implementation class's name and specific params in table param.
 *
 * @param table hive table whose parameters are updated in place
 * @see #init(org.apache.hadoop.hive.ql.metadata.Table)
 */
public void updateTableParams(Table table) {
  Map<String, String> params = table.getParameters();
  String keyPrefix = MetastoreUtil.getPartitionInfoKeyPrefix(getUpdatePeriod(), getPartCol());
  String storageClassKey = MetastoreUtil.getPartitionTimelineStorageClassKey(getUpdatePeriod(), getPartCol());
  // Record which timeline implementation wrote these params so init() can re-instantiate it.
  params.put(storageClassKey, this.getClass().getCanonicalName());
  // Persist implementation-specific state, namespaced under the (update period, part col) prefix.
  for (Map.Entry<String, String> prop : toProperties().entrySet()) {
    params.put(keyPrefix + prop.getKey(), prop.getValue());
  }
}
/**
 * Sets PartitionTimeline implementation class's name and specific params in table param.
 *
 * @param table hive table to receive this timeline's serialized state
 * @see #init(org.apache.hadoop.hive.ql.metadata.Table)
 */
public void updateTableParams(Table table) {
  final String prefix = MetastoreUtil.getPartitionInfoKeyPrefix(getUpdatePeriod(), getPartCol());
  // Store the concrete timeline class name so init() knows what to instantiate later.
  table.getParameters().put(MetastoreUtil.getPartitionTimelineStorageClassKey(getUpdatePeriod(), getPartCol()),
    this.getClass().getCanonicalName());
  // Write every implementation-specific property under the per-(period, partcol) prefix.
  for (Map.Entry<String, String> entry : toProperties().entrySet()) {
    String namespacedKey = prefix + entry.getKey();
    table.getParameters().put(namespacedKey, entry.getValue());
  }
}
/**
 * Extracts timeline implementation class from table params and instantiates it with other arguments, also in table
 * params.
 *
 * @param table hive table whose parameters hold the persisted timeline properties
 * @throws LensException if initialization from the extracted properties fails
 * @see #updateTableParams(org.apache.hadoop.hive.ql.metadata.Table)
 */
public void init(Table table) throws LensException {
  String prefix = MetastoreUtil.getPartitionInfoKeyPrefix(getUpdatePeriod(), getPartCol());
  int prefixLength = prefix.length();
  HashMap<String, String> timelineProps = Maps.newHashMap();
  // Pick out only the params namespaced to this (update period, partition column) pair
  // and strip the prefix so the remainder are bare property names.
  for (Map.Entry<String, String> param : table.getParameters().entrySet()) {
    String key = param.getKey();
    if (key.startsWith(prefix)) {
      timelineProps.put(key.substring(prefixLength), param.getValue());
    }
  }
  log.info("initializing timeline from table properties: {},{},{}", getStorageTableName(), getUpdatePeriod(),
    getPartCol());
  initFromProperties(timelineProps);
  log.info("initialized to: {}", this);
}
/**
 * Commit all partitions that were added to batch addition queue. //TODO: improve batch addition implementation.
 *
 * @return true if all the partitions were added successfully, or no partitions needed to be added
 * @throws LensException if adding the queued partitions fails
 */
public boolean commitBatchAdditions() throws LensException {
  // Nothing queued: trivially successful.
  if (getAll() == null) {
    return true;
  }
  log.info("initializing timeline from batch addition: {},{},{}", getStorageTableName(), getUpdatePeriod(),
    getPartCol());
  boolean added = add(getAll());
  // Drain the queue regardless of outcome — batch additions are single-shot.
  all = null;
  log.info("initialized to: {}", this);
  return added;
}
/**
 * Commit all partitions that were added to batch addition queue. //TODO: improve batch addition implementation.
 *
 * @return true if all the partitions were added successfully, or no partitions needed to be added
 * @throws LensException if committing the queued partitions fails
 */
public boolean commitBatchAdditions() throws LensException {
  if (getAll() != null) {
    log.info("initializing timeline from batch addition: {},{},{}", getStorageTableName(), getUpdatePeriod(),
      getPartCol());
    boolean committed = add(getAll());
    // Clear the pending queue whether or not the add succeeded.
    all = null;
    log.info("initialized to: {}", this);
    return committed;
  }
  // Empty queue — nothing to commit, report success.
  return true;
}