/**
 * Scrubs sensitive information from a log string by running it through every
 * configured query-redactor hook, in configuration order.
 *
 * @param conf Hive configuration that declares the redactor hook chain; may be null
 * @param logString raw string to redact; may be null
 * @return the redacted string, or {@code logString} unchanged when either argument is null
 * @throws InstantiationException if a configured redactor cannot be instantiated
 * @throws IllegalAccessException if a configured redactor is not accessible
 * @throws ClassNotFoundException if a configured redactor class is not on the classpath
 */
public static String redactLogString(HiveConf conf, String logString)
    throws InstantiationException, IllegalAccessException, ClassNotFoundException {
  String sanitized = logString;
  if (conf == null || logString == null) {
    // Nothing to redact (or no configuration to redact with).
    return sanitized;
  }
  List<Redactor> hooks = readHooksFromConf(conf, ConfVars.QUERYREDACTORHOOKS);
  for (Redactor hook : hooks) {
    hook.setConf(conf);
    sanitized = hook.redactQuery(sanitized);
  }
  return sanitized;
}
/**
 * Determines the lock mode needed for a write entity: plain INSERTs can run
 * concurrently and only need a shared-read lock; every other write type
 * requires a shared-write lock.
 *
 * @param writeEntity the output entity being locked
 * @return {@link LockType#SHARED_READ} for INSERT writes, otherwise {@link LockType#SHARED_WRITE}
 */
@Override
public LockType getLockType(WriteEntity writeEntity) {
  // Idiom fix: compare enums with == instead of equals() — identical semantics
  // for enum constants, but null-safe should getWriteType() ever return null.
  if (writeEntity.getWriteType() == WriteEntity.WriteType.INSERT) {
    return LockType.SHARED_READ;
  }
  return LockType.SHARED_WRITE;
}
/**
 * Post-execution hook entry point. Unpacks the query's inputs, outputs,
 * lineage info, and user from the hook context, then delegates to the
 * five-argument {@code run} overload.
 *
 * @param hookContext context for the just-finished query; must be a POST_EXEC_HOOK
 * @throws Exception propagated from the delegate overload
 */
@Override
public void run(HookContext hookContext) throws Exception {
  assert (hookContext.getHookType() == HookType.POST_EXEC_HOOK);
  Set<ReadEntity> readEntities = hookContext.getInputs();
  Set<WriteEntity> writeEntities = hookContext.getOutputs();
  LineageInfo lineageInfo = hookContext.getLinfo();
  UserGroupInformation userInfo = hookContext.getUgi();
  this.run(hookContext.getQueryState(), readEntities, writeEntities, lineageInfo, userInfo);
}
/**
 * Applies every configured query-redactor hook to the given log string so
 * that sensitive material is removed before logging.
 *
 * @param conf Hive configuration that declares the redactor hooks; may be null
 * @param logString raw string to redact; may be null
 * @return the redacted string, or {@code logString} unchanged when either argument is null
 * @throws InstantiationException if a configured redactor cannot be instantiated
 * @throws IllegalAccessException if a configured redactor is not accessible
 * @throws ClassNotFoundException if a configured redactor class is not on the classpath
 */
public static String redactLogString(HiveConf conf, String logString)
    throws InstantiationException, IllegalAccessException, ClassNotFoundException {
  String sanitized = logString;
  if (conf != null && logString != null) {
    // Each redactor sees the output of the previous one.
    for (Redactor hook : getHooks(conf, ConfVars.QUERYREDACTORHOOKS, Redactor.class)) {
      hook.setConf(conf);
      sanitized = hook.redactQuery(sanitized);
    }
  }
  return sanitized;
}
}
/**
 * Forwards the hook event to the {@link EventLogger} singleton for this
 * configuration. Any failure is logged and swallowed deliberately so that
 * event-logging problems never fail the query itself.
 *
 * @param hookContext context describing the event to log
 * @throws Exception declared by the hook interface; never actually thrown here
 */
@Override
public void run(HookContext hookContext) throws Exception {
  try {
    EventLogger logger = EventLogger.getInstance(hookContext.getConf());
    logger.handle(hookContext);
  } catch (Exception e) {
    // Fix: corrected typo in the error message ("exceptoin" -> "exception").
    LOG.error("Got exception while processing event: ", e);
  }
}
}
/**
 * Assembles a {@link QueryLifeTimeHookContextImpl} from the configuration,
 * command, and hook context accumulated by this builder.
 *
 * @return a freshly populated context instance
 */
public QueryLifeTimeHookContextImpl build() {
  QueryLifeTimeHookContextImpl ctx = new QueryLifeTimeHookContextImpl();
  ctx.setHiveConf(this.conf);
  ctx.setCommand(this.command);
  ctx.setHookContext(this.hc);
  return ctx;
}
}
/**
 * Constructor for a directory entity.
 *
 * @param d directory path
 * @param islocal true for a local-filesystem directory, false for a DFS directory
 * @param complete whether the entity is complete
 */
public Entity(Path d, boolean islocal, boolean complete) {
  this.d = d;
  p = null;
  t = null;
  // The path flavor decides the entity type.
  typ = islocal ? Type.LOCAL_DIR : Type.DFS_DIR;
  name = computeName();
  this.complete = complete;
}
/**
 * Constructor for a table read that records its parent read entity.
 *
 * @param t table being read
 * @param parent the read entity this one descends from
 */
public ReadEntity(Table t, ReadEntity parent) {
  super(t, true);
  initParent(parent);
}
/** Returns this entity's computed name as its string form. */
@Override
public String toString() {
  return getName();
}
/**
 * Constructor for a partition that is written to.
 *
 * @param p partition being written
 * @param type kind of write being performed
 */
public WriteEntity(Partition p, WriteType type) {
  super(p, true);
  setWriteTypeInternal(type);
}
/**
 * Post-execution hook entry point; pulls the query's inputs, outputs,
 * lineage info, and user out of the context and hands them to the
 * five-argument {@code run} overload.
 *
 * @param hookContext context for the completed query; must be a POST_EXEC_HOOK
 * @throws Exception propagated from the delegate overload
 */
@Override
public void run(HookContext hookContext) throws Exception {
  assert (hookContext.getHookType() == HookType.POST_EXEC_HOOK);
  final Set<ReadEntity> inputSet = hookContext.getInputs();
  final Set<WriteEntity> outputSet = hookContext.getOutputs();
  final LineageInfo lineage = hookContext.getLinfo();
  final UserGroupInformation user = hookContext.getUgi();
  this.run(hookContext.getQueryState(), inputSet, outputSet, lineage, user);
}
/**
 * Picks the lock mode for a write entity: INSERT writes can proceed under a
 * shared-read lock; all other write types need a shared-write lock.
 *
 * @param writeEntity the output entity being locked
 * @return {@link LockType#SHARED_READ} for INSERT writes, otherwise {@link LockType#SHARED_WRITE}
 */
@Override
public LockType getLockType(WriteEntity writeEntity) {
  // Idiom fix: enum constants are singletons, so == is the canonical
  // comparison and avoids an NPE if getWriteType() is ever null.
  return writeEntity.getWriteType() == WriteEntity.WriteType.INSERT
      ? LockType.SHARED_READ
      : LockType.SHARED_WRITE;
}
/**
 * Constructor for a database entity.
 *
 * @param database database that is read or written to
 * @param complete true when the database itself is the target, rather than a
 *        table or partition within it
 */
public Entity(Database database, boolean complete) {
  this.database = database;
  this.typ = Type.DATABASE;
  this.name = computeName();
  this.complete = complete;
}
/**
 * Constructor for a partition read that records its parent read entity.
 *
 * @param p partition being read
 * @param parent the read entity this one descends from
 */
public ReadEntity(Partition p, ReadEntity parent) {
  super(p, true);
  initParent(parent);
}
/**
 * Constructor for a table that is written to.
 *
 * @param t table being written
 * @param type kind of write being performed
 */
public WriteEntity(Table t, WriteType type) {
  super(t, true);
  setWriteTypeInternal(type);
}
/**
 * Returns the lock mode required for a write entity. INSERT writes only need
 * a shared-read lock; every other write type takes a shared-write lock.
 *
 * @param writeEntity the output entity being locked
 * @return {@link LockType#SHARED_READ} for INSERT writes, otherwise {@link LockType#SHARED_WRITE}
 */
@Override
public LockType getLockType(WriteEntity writeEntity) {
  // Idiom fix: use == for enum comparison — same semantics as equals() for
  // enums, and null-safe if getWriteType() ever returns null.
  if (writeEntity.getWriteType() == WriteEntity.WriteType.INSERT) {
    return LockType.SHARED_READ;
  }
  return LockType.SHARED_WRITE;
}
/** * Only use this if you are very sure of what you are doing. This is used by the * {@link org.apache.hadoop.hive.ql.parse.UpdateDeleteSemanticAnalyzer} to reset the types to * update or delete after rewriting and reparsing the queries. * @param type new operation type */ public void setWriteType(WriteType type) { setWriteTypeInternal(type); }
// Internal setter shared by the constructors and setWriteType above.
// NOTE(review): the body of setWriteTypeInternal continues beyond this chunk,
// so it is left untouched here.
private void setWriteTypeInternal(WriteType type) {
/**
 * Constructor for a database that is written to.
 *
 * @param database database being written
 * @param type kind of write being performed
 */
public WriteEntity(Database database, WriteType type) {
  super(database, true);
  setWriteTypeInternal(type);
}
/**
 * Constructor for a table write with explicit completeness.
 *
 * @param t table being written
 * @param type kind of write being performed
 * @param complete whether the entity is complete
 */
public WriteEntity(Table t, WriteType type, boolean complete) {
  super(t, complete);
  setWriteTypeInternal(type);
}
/**
 * Constructor for a dummy-partition write with explicit completeness.
 *
 * @param p dummy partition being written
 * @param type kind of write being performed
 * @param complete whether the entity is complete
 */
public WriteEntity(DummyPartition p, WriteType type, boolean complete) {
  super(p, complete);
  setWriteTypeInternal(type);
}