/**
 * Adapts the generic {@link HookContext} hook entry point to the legacy
 * four-argument overload by unpacking the fields the delegate needs.
 *
 * @param hookContext carries the query's read/write entities and caller UGI
 * @throws Exception propagated unchanged from the delegate overload
 */
@Override
public void run(HookContext hookContext) throws Exception {
  // The session is taken from the thread-local holder, not from the context.
  SessionState currentSession = SessionState.get();
  // Unpack in the same order the delegate consumes them.
  Set<ReadEntity> readEntities = hookContext.getInputs();
  Set<WriteEntity> writeEntities = hookContext.getOutputs();
  UserGroupInformation callerUgi = hookContext.getUgi();
  this.run(currentSession, readEntities, writeEntities, callerUgi);
}
/**
 * Bridges the {@link HookContext} entry point to the legacy overload that
 * additionally receives whether the statement is an EXPLAIN.
 *
 * @param hookContext carries the query plan, entities and caller UGI
 * @throws Exception propagated unchanged from the delegate overload
 */
@Override
public void run(HookContext hookContext) throws Exception {
  SessionState threadSession = SessionState.get();
  Set<ReadEntity> readEntities = hookContext.getInputs();
  Set<WriteEntity> writeEntities = hookContext.getOutputs();
  UserGroupInformation callerUgi = hookContext.getUgi();
  // The EXPLAIN flag is read from the compiled plan attached to the context.
  boolean explainStatement = hookContext.getQueryPlan().isExplain();
  this.run(threadSession, readEntities, writeEntities, callerUgi, explainStatement);
}
/**
 * Post-execution hook entry point: unpacks the {@link HookContext} and
 * forwards to the lineage-aware overload.
 *
 * @param hookContext post-execution context with entities, lineage and UGI
 * @throws Exception propagated unchanged from the delegate overload
 */
@Override
public void run(HookContext hookContext) throws Exception {
  // This hook must only be dispatched after query execution.
  assert (hookContext.getHookType() == HookType.POST_EXEC_HOOK);

  Set<ReadEntity> readEntities = hookContext.getInputs();
  Set<WriteEntity> writeEntities = hookContext.getOutputs();
  LineageInfo lineage = hookContext.getLinfo();
  UserGroupInformation callerUgi = hookContext.getUgi();

  this.run(hookContext.getQueryState(), readEntities, writeEntities, lineage, callerUgi);
}
/**
 * Entry point invoked by the hook framework after execution; delegates to
 * the overload taking the query state, entity sets, lineage info and UGI.
 *
 * @param hookContext post-execution context supplied by the driver
 * @throws Exception propagated unchanged from the delegate overload
 */
@Override
public void run(HookContext hookContext) throws Exception {
  // Guard against mis-registration: only POST_EXEC dispatch is expected.
  assert (hookContext.getHookType() == HookType.POST_EXEC_HOOK);

  Set<ReadEntity> inputEntities = hookContext.getInputs();
  Set<WriteEntity> outputEntities = hookContext.getOutputs();
  LineageInfo lineageInfo = hookContext.getLinfo();
  UserGroupInformation ugiOfCaller = hookContext.getUgi();

  this.run(hookContext.getQueryState(), inputEntities, outputEntities, lineageInfo, ugiOfCaller);
}
/**
 * Pre-execution hook entry point. If the session's output stream is a
 * {@link FetchConverter}, tells it whether a real (non-EXPLAIN) query was
 * found before delegating to the query-state overload.
 *
 * @param hookContext pre-execution context with plan, entities and UGI
 * @throws Exception propagated unchanged from the delegate overload
 */
@Override
public void run(HookContext hookContext) throws Exception {
  // This hook must only be dispatched before query execution.
  assert (hookContext.getHookType() == HookType.PRE_EXEC_HOOK);

  SessionState session = SessionState.get();
  QueryState qState = hookContext.getQueryState();
  if (session != null && session.out instanceof FetchConverter) {
    // A "found query" means an actual QUERY operation that is not being explained.
    boolean realQuery = qState.getHiveOperation() == HiveOperation.QUERY
        && !hookContext.getQueryPlan().isForExplain();
    ((FetchConverter) session.out).foundQuery(realQuery);
  }

  this.run(qState, hookContext.getInputs(), hookContext.getOutputs(), hookContext.getUgi());
}
/**
 * Framework entry point for the pre-execution phase. Notifies a
 * {@link FetchConverter} output stream (when present on the session) whether
 * the statement is a genuine query, then forwards to the delegate overload.
 *
 * @param hookContext pre-execution context supplied by the driver
 * @throws Exception propagated unchanged from the delegate overload
 */
@Override
public void run(HookContext hookContext) throws Exception {
  // Only PRE_EXEC dispatch is legal for this hook.
  assert (hookContext.getHookType() == HookType.PRE_EXEC_HOOK);

  SessionState activeSession = SessionState.get();
  QueryState currentQueryState = hookContext.getQueryState();

  boolean sessionStreamsToConverter =
      activeSession != null && activeSession.out instanceof FetchConverter;
  if (sessionStreamsToConverter) {
    // EXPLAIN runs must not be reported as found queries.
    boolean nonExplainQuery = currentQueryState.getHiveOperation() == HiveOperation.QUERY
        && !hookContext.getQueryPlan().isForExplain();
    ((FetchConverter) activeSession.out).foundQuery(nonExplainQuery);
  }

  Set<ReadEntity> readSet = hookContext.getInputs();
  Set<WriteEntity> writeSet = hookContext.getOutputs();
  UserGroupInformation ugi = hookContext.getUgi();
  this.run(currentQueryState, readSet, writeSet, ugi);
}
// Fragment of a larger builder-population method (enclosing definition not visible here).
// Records the fully-resolved table names read and written by the query on the builder,
// then begins a guarded section that only runs when the hook carries an operation id.
builder.addAllTablesRead(getTablesFromEntitySet(hookContext.getInputs())); builder.addAllTablesWritten(getTablesFromEntitySet(hookContext.getOutputs())); if (hookContext.getOperationId() != null) {
// Fragment of a larger event-assembly method (enclosing definition not visible here).
// Gathers the pieces of a query summary: the JSON-rendered explain plan, the
// session-scoped log id, the table names touched, and the execution mode name.
// NOTE(review): getJSONPlan(null, work) passes a null first argument — presumably an
// unused PrintStream/outs parameter in this code path; confirm against the ExplainTask API.
JSONObject explainPlan = explain.getJSONPlan(null, work); String logID = conf.getLogIdVar(hookContext.getSessionId()); List<String> tablesRead = getTablesFromEntitySet(hookContext.getInputs()); List<String> tablesWritten = getTablesFromEntitySet(hookContext.getOutputs()); String executionMode = getExecutionMode(plan).name();
// Fragment of a larger event-assembly method (enclosing definition not visible here).
// Collects the explain plan as JSON, the per-session log id, the read/written table
// lists derived from the hook's entity sets, and the execution mode's enum name.
JSONObject explainPlan = explain.getJSONPlan(null, work); String logID = conf.getLogIdVar(hookContext.getSessionId()); List<String> tablesRead = getTablesFromEntitySet(hookContext.getInputs()); List<String> tablesWritten = getTablesFromEntitySet(hookContext.getOutputs()); String executionMode = getExecutionMode(plan).name();
/**
 * Hook entry point that forwards the full {@link HookContext} together with
 * its unpacked entity sets and caller UGI to the wider overload.
 *
 * @param hookContext context carrying the query's entities and UGI
 * @throws Exception propagated unchanged from the delegate overload
 */
@Override
public void run(HookContext hookContext) throws Exception {
  // Arguments are evaluated left-to-right: inputs, outputs, then UGI —
  // the same order the original unpacking used.
  run(hookContext, hookContext.getInputs(), hookContext.getOutputs(), hookContext.getUgi());
}
/**
 * Thin adapter: extracts the read/write entity sets and the caller's UGI
 * from the context and hands everything to the delegate overload.
 *
 * @param hookContext context supplied by the hook framework
 * @throws Exception propagated unchanged from the delegate overload
 */
@Override
public void run(HookContext hookContext) throws Exception {
  Set<ReadEntity> readEntitySet = hookContext.getInputs();
  Set<WriteEntity> writeEntitySet = hookContext.getOutputs();
  UserGroupInformation requestUgi = hookContext.getUgi();
  run(hookContext, readEntitySet, writeEntitySet, requestUgi);
}
/**
 * Framework entry point: resolves the thread-local session and forwards it
 * with the context's entity sets and UGI to the four-argument overload.
 *
 * @param hookContext context carrying the query's entities and UGI
 * @throws Exception propagated unchanged from the delegate overload
 */
@Override
public void run(HookContext hookContext) throws Exception {
  // SessionState is thread-local; it is not available on the HookContext itself.
  SessionState boundSession = SessionState.get();
  Set<ReadEntity> queryInputs = hookContext.getInputs();
  Set<WriteEntity> queryOutputs = hookContext.getOutputs();
  UserGroupInformation executingUgi = hookContext.getUgi();
  this.run(boundSession, queryInputs, queryOutputs, executingUgi);
}
private boolean skipProcess() { Set<ReadEntity> inputs = getHiveContext().getInputs(); Set<WriteEntity> outputs = getHiveContext().getOutputs(); boolean ret = CollectionUtils.isEmpty(inputs) && CollectionUtils.isEmpty(outputs); if (!ret) { if (getContext().getHiveOperation() == HiveOperation.QUERY) { // Select query has only one output if (outputs.size() == 1) { WriteEntity output = outputs.iterator().next(); if (output.getType() == Entity.Type.DFS_DIR || output.getType() == Entity.Type.LOCAL_DIR) { if (output.getWriteType() == WriteEntity.WriteType.PATH_WRITE && output.isTempURI()) { ret = true; } } } } } return ret; } }
/**
 * Decides whether HDFS paths should be left out of the process qualified name.
 *
 * <p>For LOAD/IMPORT this depends on whether any output is a partition; for
 * EXPORT, on whether any input is a partition; QUERY always ignores paths;
 * every other operation keeps them.
 *
 * @return {@code true} when HDFS paths must be excluded from the name
 */
private boolean ignoreHDFSPathsinProcessQualifiedName() {
  HiveOperation op = context.getHiveOperation();
  if (op == HiveOperation.LOAD || op == HiveOperation.IMPORT) {
    // Data lands in the table: partition outputs decide.
    return hasPartitionEntity(getHiveContext().getOutputs());
  }
  if (op == HiveOperation.EXPORT) {
    // Data leaves the table: partition inputs decide.
    return hasPartitionEntity(getHiveContext().getInputs());
  }
  return op == HiveOperation.QUERY;
}
/**
 * Post-execution entry point: resolves the thread-local session and passes
 * it with the context's entities, lineage info and UGI to the delegate.
 *
 * @param hookContext post-execution context supplied by the driver
 * @throws Exception propagated unchanged from the delegate overload
 */
@Override
public void run(HookContext hookContext) throws Exception {
  // This hook is only valid in the post-execution phase.
  assert (hookContext.getHookType() == HookType.POST_EXEC_HOOK);

  SessionState sessionForRun = SessionState.get();
  Set<ReadEntity> readEntities = hookContext.getInputs();
  Set<WriteEntity> writeEntities = hookContext.getOutputs();
  LineageInfo lineageForRun = hookContext.getLinfo();
  UserGroupInformation ugiForRun = hookContext.getUgi();

  this.run(sessionForRun, readEntities, writeEntities, lineageForRun, ugiForRun);
}
// Fragment of getNotificationMessages() — the method's tail is not visible here.
// NOTE(review): the structure looks wrong as shown — the error branch for empty
// inputs opens a block that then calls getInputs().iterator().next(), which would
// throw NoSuchElementException on the very condition just logged. Presumably the
// original code returns (e.g. an empty list) right after LOG.error and the table
// lookups sit outside the if-block; confirm against the complete method before relying on this.
@Override public List<HookNotification> getNotificationMessages() throws Exception { if (CollectionUtils.isEmpty(getHiveContext().getInputs())) { LOG.error("AlterTableRenameCol: old-table not found in inputs list"); Table oldTable = getHiveContext().getInputs().iterator().next().getTable(); Table newTable = getHiveContext().getOutputs().iterator().next().getTable();
/**
 * Builds the qualified name of the lineage process for this hook event.
 *
 * <p>For table-producing operations (CREATE TABLE, CTAS, CREATE VIEW,
 * ALTER VIEW AS, ALTER TABLE LOCATION) the name is derived from the first
 * TABLE output (in comparator order) refreshed from the metastore, suffixed
 * with its creation time. For everything else the name is
 * {@code operationName + sortedInputs + "->" + sortedOutputs}, optionally
 * excluding HDFS paths per {@link #ignoreHDFSPathsinProcessQualifiedName()}.
 *
 * @param inputs  input entities of the process (unused in name derivation here)
 * @param outputs output entities of the process (unused in name derivation here)
 * @return the process qualified name
 * @throws Exception on metastore lookup failure
 */
protected String getQualifiedName(List<AtlasEntity> inputs, List<AtlasEntity> outputs) throws Exception {
  HiveOperation op = context.getHiveOperation();

  boolean producesTable = op == HiveOperation.CREATETABLE
      || op == HiveOperation.CREATETABLE_AS_SELECT
      || op == HiveOperation.CREATEVIEW
      || op == HiveOperation.ALTERVIEW_AS
      || op == HiveOperation.ALTERTABLE_LOCATION;

  if (producesTable) {
    List<? extends Entity> orderedOutputs = new ArrayList<>(getHiveContext().getOutputs());
    orderedOutputs.sort(entityComparator);

    for (Entity candidate : orderedOutputs) {
      if (candidate.getType() != Entity.Type.TABLE) {
        continue;
      }
      // Re-read the table from the metastore so the create time is authoritative.
      Table stale = candidate.getTable();
      Table refreshed = getHive().getTable(stale.getDbName(), stale.getTableName());
      long createdAt = getTableCreateTime(refreshed);
      return getQualifiedName(refreshed) + QNAME_SEP_PROCESS + createdAt;
    }
  }

  StringBuilder processName = new StringBuilder(getHiveContext().getOperationName());
  boolean dropHdfsPaths = ignoreHDFSPathsinProcessQualifiedName();
  addToProcessQualifiedName(processName, getHiveContext().getInputs(), dropHdfsPaths);
  processName.append("->");
  addToProcessQualifiedName(processName, getHiveContext().getOutputs(), dropHdfsPaths);
  return processName.toString();
}
/**
 * Pre-execution entry point. When the session writes through a
 * {@link FetchConverter}, informs it whether a genuine (non-EXPLAIN) query
 * was encountered, then delegates to the session-based overload.
 *
 * @param hookContext pre-execution context with plan, entities and UGI
 * @throws Exception propagated unchanged from the delegate overload
 */
@Override
public void run(HookContext hookContext) throws Exception {
  // Only PRE_EXEC dispatch is legal for this hook.
  assert (hookContext.getHookType() == HookType.PRE_EXEC_HOOK);

  SessionState session = SessionState.get();
  if (session != null && session.out instanceof FetchConverter) {
    // The operation here comes from the session, not the query state.
    boolean genuineQuery = session.getHiveOperation() == HiveOperation.QUERY
        && !hookContext.getQueryPlan().isForExplain();
    ((FetchConverter) session.out).foundQuery(genuineQuery);
  }

  // Delegate may receive a null session; the overload is expected to handle that.
  this.run(session, hookContext.getInputs(), hookContext.getOutputs(), hookContext.getUgi());
}
// Fragment of a larger event-construction method (enclosing definition not visible here).
// Copies the hook context's entity sets and hook type straight onto the event object.
event.setInputs(hookContext.getInputs()); event.setOutputs(hookContext.getOutputs()); event.setHookType(hookContext.getHookType());