/**
 * Adapter overload: unpacks the {@link HookContext} and delegates to the
 * legacy run() signature, using the current thread's SessionState.
 */
@Override
public void run(HookContext hookContext) throws Exception {
    this.run(SessionState.get(),
             hookContext.getInputs(),
             hookContext.getOutputs(),
             hookContext.getUgi());
}
/**
 * Adapter overload: unpacks the {@link HookContext} and delegates to the
 * legacy run() signature, additionally passing whether the plan is an EXPLAIN.
 */
@Override
public void run(HookContext hookContext) throws Exception {
    this.run(SessionState.get(),
             hookContext.getInputs(),
             hookContext.getOutputs(),
             hookContext.getUgi(),
             hookContext.getQueryPlan().isExplain());
}
/**
 * Post-execution entry point: verifies the hook type, then forwards the
 * query state, entities, lineage info and user to the legacy run() overload.
 */
@Override
public void run(HookContext hookContext) throws Exception {
    assert (hookContext.getHookType() == HookType.POST_EXEC_HOOK);

    Set<ReadEntity> readEntities = hookContext.getInputs();
    Set<WriteEntity> writeEntities = hookContext.getOutputs();
    LineageInfo lineage = hookContext.getLinfo();
    UserGroupInformation user = hookContext.getUgi();

    this.run(hookContext.getQueryState(), readEntities, writeEntities, lineage, user);
}
/**
 * Post-execution hook entry point. Asserts it is invoked in the post-exec
 * phase and hands the unpacked context off to the legacy run() overload.
 */
@Override
public void run(HookContext hookContext) throws Exception {
    assert (hookContext.getHookType() == HookType.POST_EXEC_HOOK);

    Set<ReadEntity> inputEntities = hookContext.getInputs();
    Set<WriteEntity> outputEntities = hookContext.getOutputs();
    LineageInfo lineageInfo = hookContext.getLinfo();
    UserGroupInformation userInfo = hookContext.getUgi();

    this.run(hookContext.getQueryState(), inputEntities, outputEntities, lineageInfo, userInfo);
}
/**
 * Pre-execution entry point. Notifies a FetchConverter-backed output stream
 * whether a plain (non-explain) QUERY is starting, then delegates to the
 * legacy run() overload.
 */
@Override
public void run(HookContext hookContext) throws Exception {
    assert (hookContext.getHookType() == HookType.PRE_EXEC_HOOK);

    SessionState sessionState = SessionState.get();
    QueryState queryState = hookContext.getQueryState();

    if (sessionState != null && sessionState.out instanceof FetchConverter) {
        // Only a real QUERY (not an EXPLAIN plan) counts as a "found" query.
        boolean plainQuery = queryState.getHiveOperation() == HiveOperation.QUERY
                && !hookContext.getQueryPlan().isForExplain();
        ((FetchConverter) sessionState.out).foundQuery(plainQuery);
    }

    this.run(queryState, hookContext.getInputs(), hookContext.getOutputs(), hookContext.getUgi());
}
/**
 * Pre-execution hook. Signals the FetchConverter (when the session output
 * stream is one) whether a non-explain QUERY is running, then forwards the
 * unpacked context to the legacy run() overload.
 */
@Override
public void run(HookContext hookContext) throws Exception {
    assert (hookContext.getHookType() == HookType.PRE_EXEC_HOOK);

    SessionState session = SessionState.get();
    QueryState qState = hookContext.getQueryState();

    if (session != null && session.out instanceof FetchConverter) {
        boolean isRealQuery =
                qState.getHiveOperation() == HiveOperation.QUERY
                        && !hookContext.getQueryPlan().isForExplain();
        ((FetchConverter) session.out).foundQuery(isRealQuery);
    }

    this.run(qState, hookContext.getInputs(), hookContext.getOutputs(), hookContext.getUgi());
}
// Record the tables written by this query on the builder and, when an
// operation id is present, attach it as well.
// NOTE(review): fragment — the enclosing method and closing braces are
// outside this view; the open brace here is balanced elsewhere.
builder.addAllTablesWritten(getTablesFromEntitySet(hookContext.getOutputs())); if (hookContext.getOperationId() != null) { builder.setOperationId(hookContext.getOperationId());
// Gather per-query metadata: session-scoped log id, tables read/written,
// execution mode name, and the address of this Hive instance.
// NOTE(review): fragment — the enclosing method signature is outside this view.
String logID = conf.getLogIdVar(hookContext.getSessionId()); List<String> tablesRead = getTablesFromEntitySet(hookContext.getInputs()); List<String> tablesWritten = getTablesFromEntitySet(hookContext.getOutputs()); String executionMode = getExecutionMode(plan).name(); String hiveInstanceAddress = hookContext.getHiveInstanceAddress();
// Collect query metadata (log id, input/output tables, execution mode,
// Hive instance address) for downstream reporting.
// NOTE(review): fragment — duplicate of the snippet above; enclosing method
// is outside this view.
String logID = conf.getLogIdVar(hookContext.getSessionId()); List<String> tablesRead = getTablesFromEntitySet(hookContext.getInputs()); List<String> tablesWritten = getTablesFromEntitySet(hookContext.getOutputs()); String executionMode = getExecutionMode(plan).name(); String hiveInstanceAddress = hookContext.getHiveInstanceAddress();
/**
 * Entry point adapter: forwards the hook context along with its unpacked
 * inputs, outputs and user to the internal run() overload.
 */
@Override
public void run(HookContext hookContext) throws Exception {
    run(hookContext,
        hookContext.getInputs(),
        hookContext.getOutputs(),
        hookContext.getUgi());
}
/**
 * Hook entry point. Extracts the read/write entity sets and the caller's
 * UGI from the context and delegates to the internal run() overload.
 */
@Override
public void run(HookContext hookContext) throws Exception {
    Set<ReadEntity> reads = hookContext.getInputs();
    Set<WriteEntity> writes = hookContext.getOutputs();
    UserGroupInformation user = hookContext.getUgi();

    run(hookContext, reads, writes, user);
}
/**
 * Builds Atlas object ids for every TABLE output of the current hook
 * context, evicting each table from the known-table cache as it goes.
 *
 * @return ids of the affected tables (possibly empty, never null)
 */
public List<AtlasObjectId> getEntities() throws Exception {
    List<AtlasObjectId> entityIds = new ArrayList<>();

    for (Entity output : getHiveContext().getOutputs()) {
        if (output.getType() != Entity.Type.TABLE) {
            continue;
        }

        String tableQualifiedName = getQualifiedName(output.getTable());
        AtlasObjectId tableId = new AtlasObjectId(HIVE_TYPE_TABLE, ATTRIBUTE_QUALIFIED_NAME, tableQualifiedName);

        // Drop any cached copy so the next lookup sees the fresh state.
        context.removeFromKnownTable(tableQualifiedName);

        entityIds.add(tableId);
    }

    return entityIds;
}
}
/**
 * Evicts databases/tables touched by a CREATE* or ALTER* operation from the
 * known-objects cache, so stale cached entities are not reused.
 */
private void init() {
    if (knownObjects != null) {
        String operationName = hiveContext.getOperationName();

        // BUG FIX: the original condition read
        //   operationName != null && operationName.startsWith("CREATE") || operationName.startsWith("ALTER")
        // which parses as (null-check && CREATE) || ALTER and throws an NPE
        // when operationName is null. The OR must be parenthesized so the
        // null check guards both startsWith calls.
        if (operationName != null
                && (operationName.startsWith("CREATE") || operationName.startsWith("ALTER"))) {
            if (CollectionUtils.isNotEmpty(hiveContext.getOutputs())) {
                for (WriteEntity output : hiveContext.getOutputs()) {
                    switch (output.getType()) {
                        case DATABASE:
                            knownObjects.removeFromKnownDatabase(getQualifiedName(output.getDatabase()));
                            break;

                        case TABLE:
                            knownObjects.removeFromKnownTable(getQualifiedName(output.getTable()));
                            break;
                    }
                }
            }
        }
    }
}
}
/**
 * Bridges the HookContext-based entry point to the legacy run() overload,
 * supplying the thread-local SessionState.
 */
@Override
public void run(HookContext hookContext) throws Exception {
    SessionState sessionState = SessionState.get();
    Set<ReadEntity> readSet = hookContext.getInputs();
    Set<WriteEntity> writeSet = hookContext.getOutputs();
    UserGroupInformation user = hookContext.getUgi();

    this.run(sessionState, readSet, writeSet, user);
}
private boolean skipProcess() { Set<ReadEntity> inputs = getHiveContext().getInputs(); Set<WriteEntity> outputs = getHiveContext().getOutputs(); boolean ret = CollectionUtils.isEmpty(inputs) && CollectionUtils.isEmpty(outputs); if (!ret) { if (getContext().getHiveOperation() == HiveOperation.QUERY) { // Select query has only one output if (outputs.size() == 1) { WriteEntity output = outputs.iterator().next(); if (output.getType() == Entity.Type.DFS_DIR || output.getType() == Entity.Type.LOCAL_DIR) { if (output.getWriteType() == WriteEntity.WriteType.PATH_WRITE && output.isTempURI()) { ret = true; } } } } } return ret; } }
/**
 * Post-execution hook entry point: verifies the phase, then forwards
 * session state, entities, lineage info and user to the legacy overload.
 */
@Override
public void run(HookContext hookContext) throws Exception {
    assert (hookContext.getHookType() == HookType.POST_EXEC_HOOK);

    SessionState sessionState = SessionState.get();
    Set<ReadEntity> readEntities = hookContext.getInputs();
    Set<WriteEntity> writeEntities = hookContext.getOutputs();
    LineageInfo lineage = hookContext.getLinfo();
    UserGroupInformation user = hookContext.getUgi();

    this.run(sessionState, readEntities, writeEntities, lineage, user);
}
/**
 * Determines whether HDFS paths should be left out of the process qualified
 * name for the current operation: yes for any QUERY; for LOAD/IMPORT only
 * when an output is a partition; for EXPORT only when an input is.
 */
private boolean ignoreHDFSPathsinProcessQualifiedName() {
    HiveOperation operation = context.getHiveOperation();

    if (operation == HiveOperation.LOAD || operation == HiveOperation.IMPORT) {
        return hasPartitionEntity(getHiveContext().getOutputs());
    }

    if (operation == HiveOperation.EXPORT) {
        return hasPartitionEntity(getHiveContext().getInputs());
    }

    return operation == HiveOperation.QUERY;
}
/**
 * Builds Atlas object ids for every DATABASE or TABLE output of the current
 * hook context, evicting each from the corresponding known-object cache.
 *
 * @return ids of the affected databases/tables (possibly empty, never null)
 */
public List<AtlasObjectId> getEntities() throws Exception {
    List<AtlasObjectId> entityIds = new ArrayList<>();

    for (Entity output : getHiveContext().getOutputs()) {
        switch (output.getType()) {
            case DATABASE: {
                String dbQualifiedName = getQualifiedName(output.getDatabase());
                AtlasObjectId dbObjId = new AtlasObjectId(HIVE_TYPE_DB, ATTRIBUTE_QUALIFIED_NAME, dbQualifiedName);

                context.removeFromKnownDatabase(dbQualifiedName);
                entityIds.add(dbObjId);
                break;
            }

            case TABLE: {
                String tblQualifiedName = getQualifiedName(output.getTable());
                AtlasObjectId tblObjId = new AtlasObjectId(HIVE_TYPE_TABLE, ATTRIBUTE_QUALIFIED_NAME, tblQualifiedName);

                context.removeFromKnownTable(tblQualifiedName);
                entityIds.add(tblObjId);
                break;
            }

            default:
                break;
        }
    }

    return entityIds;
}
}
/**
 * Collects Atlas entities for every DATABASE output of the current hook
 * context. Each database is re-fetched by name from the Hive client; a
 * database that cannot be retrieved is logged and skipped.
 *
 * @return the collected entities plus any processed-entity bookkeeping
 */
public AtlasEntitiesWithExtInfo getEntities() throws Exception {
    AtlasEntitiesWithExtInfo result = new AtlasEntitiesWithExtInfo();

    for (Entity output : getHiveContext().getOutputs()) {
        if (output.getType() != Entity.Type.DATABASE) {
            continue;
        }

        Database db = output.getDatabase();
        if (db != null) {
            // Re-fetch by name so we work with the current definition.
            db = getHive().getDatabase(db.getName());
        }

        if (db == null) {
            LOG.error("CreateDatabase.getEntities(): failed to retrieve db");
        } else {
            result.addEntity(toDbEntity(db));
        }
    }

    addProcessedEntities(result);

    return result;
}
}
/**
 * Pre-execution hook. Notifies a FetchConverter-backed session output
 * stream whether a plain (non-explain) QUERY is starting, then delegates to
 * the legacy run() overload with the unpacked context.
 */
@Override
public void run(HookContext hookContext) throws Exception {
    assert (hookContext.getHookType() == HookType.PRE_EXEC_HOOK);

    SessionState sessionState = SessionState.get();

    if (sessionState != null && sessionState.out instanceof FetchConverter) {
        boolean plainQuery = sessionState.getHiveOperation() == HiveOperation.QUERY
                && !hookContext.getQueryPlan().isForExplain();
        ((FetchConverter) sessionState.out).foundQuery(plainQuery);
    }

    this.run(sessionState, hookContext.getInputs(), hookContext.getOutputs(), hookContext.getUgi());
}