Exception ex = null; try { firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, part_vals, new_part, this)); if (part_vals != null && !part_vals.isEmpty()) { MetaStoreServerUtils.validatePartitionNameCharacters(new_part.getValues(),
/**
 * Verifies that the caller holds the ALTER_METADATA privilege on the partition
 * being altered, consulting every configured metastore authorization provider.
 *
 * @param context the pre-alter-partition event carrying the new partition state
 * @throws InvalidOperationException if authorization is denied or the object is missing
 * @throws MetaException if the authorization check itself fails with a HiveException
 */
private void authorizeAlterPartition(PreAlterPartitionEvent context) throws InvalidOperationException, MetaException {
  try {
    // Wrap the thrift-level partition so the ql-layer authorizers can inspect it.
    org.apache.hadoop.hive.metastore.api.Partition newPart = context.getNewPartition();
    org.apache.hadoop.hive.ql.metadata.Partition authTarget = new PartitionWrapper(newPart, context);
    for (HiveMetastoreAuthorizationProvider provider : tAuthorizers.get()) {
      // Each provider must independently grant ALTER_METADATA on the partition.
      provider.authorize(authTarget, null, new Privilege[]{Privilege.ALTER_METADATA});
    }
  } catch (AuthorizationException | NoSuchObjectException e) {
    // Denials and missing objects surface to the client as invalid operations.
    throw invalidOperationException(e);
  } catch (HiveException e) {
    // Any other ql-layer failure is reported as a metastore-level error.
    throw metaException(e);
  }
}
validateAlterPartition(origP, origP, preAlterPartEvent.getDbName(), preAlterPartEvent.getTableName(), preAlterPartEvent.getNewPartition().getValues(), preAlterPartEvent.getNewPartition());
getAuthServer(), context.getDbName(), context.getTableName()); HierarcyBuilder outputBuilder = new HierarcyBuilder().addTableToOutput( getAuthServer(), context.getDbName(), context.getTableName()); Partition partition = context.getNewPartition(); String partitionLocation = getSdLocation(partition.getSd()); if (!StringUtils.isEmpty(partitionLocation)) { String tableLocation = context.getHandler().get_table( partition.getDbName(), partition.getTableName()).getSd().getLocation();
tmpPart.setCatName(getDefaultCatalog(conf)); firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, null, tmpPart, this));
/**
 * Runs the ALTER_METADATA authorization check for an alter-partition request
 * against all registered {@code HiveMetastoreAuthorizationProvider}s.
 *
 * @param context event describing the partition after the alteration
 * @throws InvalidOperationException when a provider denies access or the object does not exist
 * @throws MetaException when the underlying authorization machinery throws a HiveException
 */
private void authorizeAlterPartition(PreAlterPartitionEvent context) throws InvalidOperationException, MetaException {
  try {
    // Adapt the metastore API partition into the ql metadata form expected by authorizers.
    org.apache.hadoop.hive.ql.metadata.Partition candidate =
        new PartitionWrapper(context.getNewPartition(), context);
    for (HiveMetastoreAuthorizationProvider checker : tAuthorizers.get()) {
      checker.authorize(candidate, null, new Privilege[]{Privilege.ALTER_METADATA});
    }
  } catch (AuthorizationException | NoSuchObjectException e) {
    // Authorization refusals and lookups of absent objects map to invalid-operation.
    throw invalidOperationException(e);
  } catch (HiveException e) {
    // Remaining Hive failures are translated to a generic metastore exception.
    throw metaException(e);
  }
}
for (MetaStorePreEventListener listener : preListeners) { listener.onEvent( new PreAlterPartitionEvent(db_name, tbl_name, null, tmpPart, this));
/**
 * Enforces the ALTER_METADATA privilege for the partition carried by a
 * pre-alter-partition event; every thread-local authorizer must approve.
 *
 * @param context event holding the proposed new partition definition
 * @throws InvalidOperationException on authorization denial or missing object
 * @throws MetaException on any other HiveException raised during the check
 */
private void authorizeAlterPartition(PreAlterPartitionEvent context) throws InvalidOperationException, MetaException {
  try {
    org.apache.hadoop.hive.metastore.api.Partition apiPartition = context.getNewPartition();
    // The authorizer API operates on ql-layer partitions, so wrap the API object first.
    org.apache.hadoop.hive.ql.metadata.Partition wrapped = new PartitionWrapper(apiPartition, context);
    for (HiveMetastoreAuthorizationProvider auth : tAuthorizers.get()) {
      auth.authorize(wrapped, null, new Privilege[]{Privilege.ALTER_METADATA});
    }
  } catch (AuthorizationException | NoSuchObjectException e) {
    // Denied or nonexistent target: report as an invalid operation to the caller.
    throw invalidOperationException(e);
  } catch (HiveException e) {
    // Unexpected Hive-side failure during authorization.
    throw metaException(e);
  }
}
Exception ex = null; try { firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, part_vals, new_part, this));
try { for (Partition tmpPart : new_parts) { firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, null, tmpPart, this));
Exception ex = null; try { firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, part_vals, new_part, this)); if (part_vals != null && !part_vals.isEmpty()) { MetaStoreUtils.validatePartitionNameCharacters(new_part.getValues(),
tmpPart.setCatName(getDefaultCatalog(conf)); firePreEvent(new PreAlterPartitionEvent(parsedDbName[DB_NAME], tbl_name, null, tmpPart, this));
Exception ex = null; try { firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, part_vals, new_part, this));
try { for (Partition tmpPart : new_parts) { firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, null, tmpPart, this));
Exception ex = null; try { firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, part_vals, new_part, this));