/**
 * Adds {@code dependent} as a child of the wrapped task by delegating to the
 * wrapped dependency's own {@code addDependentTask}.
 *
 * @param dependent task that must run after the wrapped task
 * @return the wrapped task's result (by the convention of the other
 *         implementations here: true if added, false if it already existed)
 */
public boolean addDependentTask(Task<? extends Serializable> dependent) {
  return wrappedDep.addDependentTask(dependent);
}
/**
 * Splices {@code task} into the DAG directly above {@code dependentTask}:
 * every current parent of {@code dependentTask} gains {@code task} as a child,
 * {@code dependentTask} is detached from the parents of {@code task}, and
 * finally {@code dependentTask} is re-attached as a child of {@code task}.
 */
private void interleaveTask(Task<? extends Serializable> dependentTask, Task<? extends Serializable> task) {
  // Hang task off of every existing parent of dependentTask.
  for (Task<? extends Serializable> parent : dependentTask.getParentTasks()) {
    parent.addDependentTask(task);
  }
  // Detach dependentTask from the current parents of task.
  for (Task<? extends Serializable> parent : task.getParentTasks()) {
    parent.removeDependentTask(dependentTask);
  }
  // dependentTask now runs strictly after task.
  task.addDependentTask(dependentTask);
}
/** * Makes the exportTask run after all other tasks of the "insert into T ..." are done. */ private void addExportTask(List<Task<?>> rootTasks, Task<ExportWork> exportTask, Task<DDLWork> alterTable) { for (Task<? extends Serializable> t : rootTasks) { if (t.getNumChild() <= 0) { //todo: ConditionalTask#addDependentTask(Task) doesn't do the right thing: HIVE-18978 t.addDependentTask(alterTable); //this is a leaf so add exportTask to follow it alterTable.addDependentTask(exportTask); } else { addExportTask(t.getDependentTasks(), exportTask, alterTable); } } }
/** * Adds the dependencyTaskForMultiInsert in ctx as a dependent of parentTask. If mvTask is a * load table, and HIVE_MULTI_INSERT_ATOMIC_OUTPUTS is set, adds mvTask as a dependent of * dependencyTaskForMultiInsert in ctx, otherwise adds mvTask as a dependent of parentTask as * well. * * @param mvTask * @param hconf * @param parentTask * @param dependencyTask */ public static void addDependentMoveTasks(Task<MoveWork> mvTask, HiveConf hconf, Task<? extends Serializable> parentTask, DependencyCollectionTask dependencyTask) { if (mvTask != null) { if (dependencyTask != null) { parentTask.addDependentTask(dependencyTask); if (mvTask.getWork().getLoadTableWork() != null) { // Moving tables/partitions depend on the dependencyTask dependencyTask.addDependentTask(mvTask); } else { // Moving files depends on the parentTask (we still want the dependencyTask to depend // on the parentTask) parentTask.addDependentTask(mvTask); } } else { parentTask.addDependentTask(mvTask); } } }
/**
 * Add a dependent task on the current conditional task. The task will not be a direct child of
 * conditional task. Actually it will be added as child task of associated tasks.
 *
 * @return true if the task got added false if it already existed
 */
@Override
public boolean addDependentTask(Task<? extends Serializable> dependent) {
  if (getListTasks() == null) {
    return false;
  }
  boolean addedToAll = true;
  for (Task<? extends Serializable> listTask : getListTasks()) {
    // Evaluate the add unconditionally so every list task receives the child,
    // even after one of them reports the dependency already existed.
    boolean added = listTask.addDependentTask(dependent);
    addedToAll = addedToAll && added;
  }
  return addedToAll;
}
/**
 * Add a dependent task on the current conditional task. The task will not be a direct child of
 * conditional task. Actually it will be added as child task of associated tasks.
 *
 * @return true if the task got added false if it already existed
 */
@Override
public boolean addDependentTask(Task<? extends Serializable> dependent) {
  boolean ret = false;
  if (getListTasks() != null) {
    ret = true;
    for (Task<? extends Serializable> tsk : getListTasks()) {
      // Deliberate non-short-circuit '&': every list task must receive the
      // dependent even after one of them reports it already existed.
      ret = ret & tsk.addDependentTask(dependent);
    }
  }
  return ret;
}
/** * Insert the rewrite tasks at the head of the pctx task tree * @param pctx * @param context * @param chosenRewrite */ private void insertIndexQuery(ParseContext pctx, IndexWhereProcCtx context, List<Task<?>> chosenRewrite) { Task<?> wholeTableScan = context.getCurrentTask(); LinkedHashSet<Task<?>> rewriteLeaves = new LinkedHashSet<Task<?>>(); findLeaves(chosenRewrite, rewriteLeaves); for (Task<?> leaf : rewriteLeaves) { leaf.addDependentTask(wholeTableScan); // add full scan task as child for every index query task } // replace the original with the index sub-query as a root task pctx.replaceRootTask(wholeTableScan, chosenRewrite); }
private Task<? extends Serializable> dbUpdateReplStateTask(String dbName, String replState, Task<? extends Serializable> preCursor) { HashMap<String, String> mapProp = new HashMap<>(); mapProp.put(ReplicationSpec.KEY.CURR_STATE_ID.toString(), replState); AlterDatabaseDesc alterDbDesc = new AlterDatabaseDesc(dbName, mapProp, new ReplicationSpec(replState, replState)); Task<? extends Serializable> updateReplIdTask = TaskFactory.get(new DDLWork(inputs, outputs, alterDbDesc), conf); // Link the update repl state task with dependency collection task if (preCursor != null) { preCursor.addDependentTask(updateReplIdTask); log.debug("Added {}:{} as a precursor of {}:{}", preCursor.getClass(), preCursor.getId(), updateReplIdTask.getClass(), updateReplIdTask.getId()); } return updateReplIdTask; }
/**
 * Expands a replication event into its load tasks, chains each task behind the
 * context precursor (when present), records the handler's read/write entities,
 * and finally appends the repl-state update tasks.
 */
private List<Task<? extends Serializable>> analyzeEventLoad(MessageHandler.Context context)
    throws SemanticException {
  MessageHandler messageHandler = context.dmd.getDumpType().handler();
  List<Task<? extends Serializable>> tasks = messageHandler.handle(context);
  if (context.precursor != null) {
    for (Task<? extends Serializable> eventTask : tasks) {
      context.precursor.addDependentTask(eventTask);
      log.debug("Added {}:{} as a precursor of {}:{}", context.precursor.getClass(),
          context.precursor.getId(), eventTask.getClass(), eventTask.getId());
    }
  }
  inputs.addAll(messageHandler.readEntities());
  outputs.addAll(messageHandler.writeEntities());
  return addUpdateReplStateTasks(StringUtils.isEmpty(context.tableName),
      messageHandler.getUpdatedMetadata(), tasks);
}
/**
 * Creates the task that commits the migration transaction and records the last
 * replicated state id ({@code replState}), optionally chained to run after
 * {@code preCursor}.
 *
 * @param partSpec partition specs whose names are recorded; may be null/empty
 * @return the newly created REPL_MIGRATION_COMMIT_TXN task
 * @throws SemanticException if a partition spec cannot be rendered as a name
 */
private Task<? extends Serializable> getMigrationCommitTxnTask(String dbName, String tableName,
    List<Map<String, String>> partSpec, String replState, boolean needUpdateDBReplId,
    Task<? extends Serializable> preCursor) throws SemanticException {
  ReplLastIdInfo replLastIdInfo = new ReplLastIdInfo(dbName, Long.parseLong(replState));
  replLastIdInfo.setTable(tableName);
  replLastIdInfo.setNeedUpdateDBReplId(needUpdateDBReplId);
  if (partSpec != null && !partSpec.isEmpty()) {
    List<String> partitionList = new ArrayList<>();
    for (Map<String, String> part : partSpec) {
      try {
        partitionList.add(Warehouse.makePartName(part, false));
      } catch (MetaException e) {
        // Preserve the original exception as the cause instead of discarding it.
        throw new SemanticException(e.getMessage(), e);
      }
    }
    replLastIdInfo.setPartitionList(partitionList);
  }
  Task<? extends Serializable> updateReplIdTxnTask = TaskFactory.get(
      new ReplTxnWork(replLastIdInfo, ReplTxnWork.OperationType.REPL_MIGRATION_COMMIT_TXN), conf);
  if (preCursor != null) {
    preCursor.addDependentTask(updateReplIdTxnTask);
    log.debug("Added {}:{} as a precursor of {}:{}", preCursor.getClass(), preCursor.getId(),
        updateReplIdTxnTask.getClass(), updateReplIdTxnTask.getId());
  }
  return updateReplIdTxnTask;
}
/**
 * Ensures the current top operator is planned into the current task (only the
 * first time it is seen for that task), then hangs {@code childTask}, when
 * given, off the current task.
 */
private void processLinkedFileDesc(GenMRProcContext ctx, Task<? extends Serializable> childTask)
    throws SemanticException {
  Task<? extends Serializable> current = ctx.getCurrTask();
  TableScanOperator topOp = ctx.getCurrTopOp();
  boolean needsPlan = topOp != null && !ctx.isSeenOp(current, topOp);
  if (needsPlan) {
    GenMapRedUtils.setTaskPlan(ctx.getCurrAliasId(), topOp, current, false, ctx);
  }
  if (childTask != null) {
    current.addDependentTask(childTask);
  }
}
/**
 * Builds a copy-then-load chain for importing table data: a copy task that
 * stages the exported data files into a scratch path, followed by a move task
 * that loads the staged files into the table. The copy task is registered as
 * a root task and the load task (the tail of the chain) is returned.
 */
private static Task<?> loadTable(URI fromURI, Table table, boolean replace, Path tgtPath,
    ReplicationSpec replicationSpec, EximUtil.SemanticAnalyzerWrapperContext x) {
  Path dataPath = new Path(fromURI.toString(), EximUtil.DATA_PATH_NAME);
  Path tmpPath = x.getCtx().getExternalTmpPath(tgtPath);
  Task<?> copyTask = ReplCopyTask.getLoadCopyTask(replicationSpec, dataPath, tmpPath, x.getConf());
  LoadTableDesc loadTableWork =
      new LoadTableDesc(tmpPath, Utilities.getTableDesc(table), new TreeMap<String, String>(), replace);
  MoveWork moveWork = new MoveWork(x.getInputs(), x.getOutputs(), loadTableWork, null, false);
  Task<?> loadTableTask = TaskFactory.get(moveWork, x.getConf());
  // Load can only start once the staged copy has completed.
  copyTask.addDependentTask(loadTableTask);
  x.getTasks().add(copyTask);
  return loadTableTask;
}
/**
 * Links every task in {@code childTasks} to run after every task in
 * {@code parentTasks}; when the parent tracker has no tasks, the child tasks
 * are promoted into it directly instead.
 */
private void setUpDependencies(TaskTracker parentTasks, TaskTracker childTasks) {
  if (!parentTasks.hasTasks()) {
    // Nothing to depend on - the children become the tracker's own tasks.
    for (Task<? extends Serializable> childTask : childTasks.tasks()) {
      parentTasks.addTask(childTask);
    }
    return;
  }
  for (Task<? extends Serializable> parentTask : parentTasks.tasks()) {
    for (Task<? extends Serializable> childTask : childTasks.tasks()) {
      parentTask.addDependentTask(childTask);
    }
  }
}
private Task<? extends Serializable> tableUpdateReplStateTask(String dbName, String tableName, Map<String, String> partSpec, String replState, Task<? extends Serializable> preCursor) throws SemanticException { HashMap<String, String> mapProp = new HashMap<>(); mapProp.put(ReplicationSpec.KEY.CURR_STATE_ID.toString(), replState); AlterTableDesc alterTblDesc = new AlterTableDesc( AlterTableDesc.AlterTableTypes.ADDPROPS, new ReplicationSpec(replState, replState)); alterTblDesc.setProps(mapProp); alterTblDesc.setOldName(StatsUtils.getFullyQualifiedTableName(dbName, tableName)); alterTblDesc.setPartSpec((HashMap<String, String>) partSpec); Task<? extends Serializable> updateReplIdTask = TaskFactory.get(new DDLWork(inputs, outputs, alterTblDesc), conf); // Link the update repl state task with dependency collection task if (preCursor != null) { preCursor.addDependentTask(updateReplIdTask); log.debug("Added {}:{} as a precursor of {}:{}", preCursor.getClass(), preCursor.getId(), updateReplIdTask.getClass(), updateReplIdTask.getId()); } return updateReplIdTask; }
/**
 * Plans the current top operator into the current task when it has not been
 * seen for that task yet, then hangs {@code childTask}, when given, off the
 * current task.
 *
 * @param ctx       GenMR walk context supplying the current task/operator/alias
 * @param childTask optional task to run after the current task; may be null
 */
private void processLinkedFileDesc(GenMRProcContext ctx, Task<? extends Serializable> childTask)
    throws SemanticException {
  Task<? extends Serializable> currTask = ctx.getCurrTask();
  TableScanOperator currTopOp = ctx.getCurrTopOp();
  if (currTopOp != null && !ctx.isSeenOp(currTask, currTopOp)) {
    String currAliasId = ctx.getCurrAliasId();
    GenMapRedUtils.setTaskPlan(currAliasId, currTopOp, currTask, false, ctx);
  }
  if (childTask != null) {
    currTask.addDependentTask(childTask);
  }
}
public static List<Task<? extends Serializable>> addOpenTxnTaskForMigration(String actualDbName, String actualTblName, HiveConf conf, UpdatedMetaDataTracker updatedMetaDataTracker, Task<? extends Serializable> childTask, org.apache.hadoop.hive.metastore.api.Table tableObj) throws IOException, TException { List<Task<? extends Serializable>> taskList = new ArrayList<>(); taskList.add(childTask); if (conf.getBoolVar(HiveConf.ConfVars.HIVE_STRICT_MANAGED_TABLES) && updatedMetaDataTracker != null && !AcidUtils.isTransactionalTable(tableObj) && TableType.valueOf(tableObj.getTableType()) == TableType.MANAGED_TABLE) { //TODO : isPathOwnByHive is hard coded to true, need to get it from repl dump metadata. HiveStrictManagedMigration.TableMigrationOption migrationOption = HiveStrictManagedMigration.determineMigrationTypeAutomatically(tableObj, TableType.MANAGED_TABLE, null, conf, null, true); if (migrationOption == MANAGED) { //if conversion to managed table. Task<? extends Serializable> replTxnTask = TaskFactory.get(new ReplTxnWork(actualDbName, actualTblName, ReplTxnWork.OperationType.REPL_MIGRATION_OPEN_TXN), conf); replTxnTask.addDependentTask(childTask); updatedMetaDataTracker.setNeedCommitTxn(true); taskList.add(replTxnTask); } } return taskList; } }
/**
 * Verifies that getMRTasks walks a small task DAG and returns each MR task
 * exactly once, in the expected order: root, both children, then a grandchild.
 */
@Test
@SuppressWarnings("unchecked")
public void testGetTasksRecursion() {
  Task<MapredWork> root = getMapredWork();
  Task<MapredWork> left = getMapredWork();
  Task<MapredWork> right = getMapredWork();
  Task<MapredWork> grandChild = getMapredWork();

  // root -> {left, right}, left -> grandChild
  root.addDependentTask(left);
  root.addDependentTask(right);
  left.addDependentTask(grandChild);

  assertEquals(Lists.newArrayList(root, left, right, grandChild),
      Utilities.getMRTasks(getTestDiamondTaskGraph(root)));
}
}
/**
 * Produces the root task for loading this database's metadata: a create task
 * for a brand-new db, an alter task when replacing an existing one, or nothing
 * for any other load type. A created root task is followed by a task that
 * sets the db owner info, and is registered with the tracker.
 *
 * @throws SemanticException wrapping any failure during metadata processing
 */
public TaskTracker tasks() throws SemanticException {
  try {
    Database dbInMetadata = readDbMetadata();
    ReplLoadOpType loadDbType = getLoadDbType(dbInMetadata.getName());
    Task<? extends Serializable> dbRootTask;
    switch (loadDbType) {
    case LOAD_NEW:
      dbRootTask = createDbTask(dbInMetadata);
      break;
    case LOAD_REPLACE:
      dbRootTask = alterDbTask(dbInMetadata);
      break;
    default:
      dbRootTask = null;
      break;
    }
    if (dbRootTask != null) {
      // Owner update runs only after the create/alter root task.
      dbRootTask.addDependentTask(setOwnerInfoTask(dbInMetadata));
      tracker.addTask(dbRootTask);
    }
    return tracker;
  } catch (Exception e) {
    throw new SemanticException(e.getMessage(), e);
  }
}
/**
 * Partitions this conditional task's branch list against the resolver's
 * selection ({@code resTasks}): unselected branches are removed from the
 * driver context and unlinked from the DAG; selected branches are re-linked
 * under this task's parents and queued as runnable.
 */
private void resolveTask(DriverContext driverContext) throws HiveException {
  for (Task<? extends Serializable> tsk : getListTasks()) {
    if (!resTasks.contains(tsk)) {
      // Branch not chosen by the resolver: drop it from the driver context.
      driverContext.remove(tsk);
      console.printInfo(tsk.getId() + " is filtered out by condition resolver.");
      if (tsk.isMapRedTask()) {
        // Keep the job counter consistent even though this MR job never runs.
        driverContext.incCurJobNo(1);
      }
      //recursively remove this task from its children's parent task
      tsk.removeFromChildrenTasks();
    } else {
      if (getParentTasks() != null) {
        // This makes it so that we can go back up the tree later
        for (Task<? extends Serializable> task : getParentTasks()) {
          task.addDependentTask(tsk);
        }
      }
      // resolved task
      if (driverContext.addToRunnable(tsk)) {
        console.printInfo(tsk.getId() + " is selected by condition resolver.");
      }
    }
  }
}
private void analyzeAlterIndexRebuild(ASTNode ast) throws SemanticException { String[] qualified = getQualifiedTableName((ASTNode) ast.getChild(0)); String indexName = unescapeIdentifier(ast.getChild(1).getText()); HashMap<String, String> partSpec = null; Tree part = ast.getChild(2); if (part != null) { partSpec = getValidatedPartSpec(getTable(qualified), (ASTNode)part, conf, false); } List<Task<?>> indexBuilder = getIndexBuilderMapRed(qualified, indexName, partSpec); rootTasks.addAll(indexBuilder); // Handle updating index timestamps AlterIndexDesc alterIdxDesc = new AlterIndexDesc(AlterIndexTypes.UPDATETIMESTAMP); alterIdxDesc.setIndexName(indexName); alterIdxDesc.setBaseTableName(getDotName(qualified)); alterIdxDesc.setSpec(partSpec); Task<?> tsTask = TaskFactory.get(new DDLWork(alterIdxDesc), conf); for (Task<?> t : indexBuilder) { t.addDependentTask(tsTask); } }