/**
 * Appends a sub-task to this chained executable, wiring its parent link and
 * assigning it a sequential id of the form {@code "<parentId>-NN"}.
 */
@Override
public void addTask(AbstractExecutable executable) {
    // Child ids derive from the current sub-task count, zero-padded to two digits.
    String childId = getId() + "-" + String.format(Locale.ROOT, "%02d", subTasks.size());
    executable.setParentExecutable(this);
    executable.setId(childId);
    subTasks.add(executable);
}
/**
 * Transitions this job to RUNNING at execution start, recording which
 * process/host picked it up and, on the very first start only, the start time.
 */
@Override
protected void onExecuteStart(ExecutableContext executableContext) {
    Map<String, String> info = Maps.newHashMap();
    info.put(BUILD_INSTANCE, DistributedLockFactory.processAndHost());
    // A non-positive start time means the job has never started before: stamp it now.
    if (getStartTime() <= 0) {
        info.put(START_TIME, Long.toString(System.currentTimeMillis()));
    }
    getManager().updateJobOutput(getId(), ExecutableState.RUNNING, info, null);
}
/**
 * Phase 1 of the merge job flow: appends the step that merges segment offsets
 * for the segment being built.
 */
@Override
public void addStepPhase1_MergeDictionary(DefaultChainedExecutable jobFlow) {
    String jobId = jobFlow.getId();
    jobFlow.addTask(createMergeOffsetStep(jobId, cubeSegment));
}
}
/**
 * Phase 2 of the merge job flow: merges cuboid data across the merging
 * segments, converts the merged cuboids to HFiles, then bulk-loads them.
 */
@Override
public void addStepPhase2_BuildCube(CubeSegment seg, List<CubeSegment> mergingSegments,
        DefaultChainedExecutable jobFlow) {
    final String jobId = jobFlow.getId();
    jobFlow.addTask(steps.createMergeCuboidDataStep(seg, mergingSegments, jobId, MergeCuboidJob.class));
    jobFlow.addTask(steps.createConvertCuboidToHfileStep(jobId));
    jobFlow.addTask(steps.createBulkLoadStep(jobId));
}
/**
 * Phase 2 of the dictionary-building flow: appends the HTable-creation step.
 */
@Override
public void addStepPhase2_BuildDictionary(DefaultChainedExecutable jobFlow) {
    final String jobId = jobFlow.getId();
    jobFlow.addTask(steps.createCreateHTableStep(jobId));
}
/**
 * Phase 1 of the merge job flow: appends the HTable-creation step for the
 * merged segment.
 */
@Override
public void addStepPhase1_MergeDictionary(DefaultChainedExecutable jobFlow) {
    String jobId = jobFlow.getId();
    jobFlow.addTask(steps.createCreateHTableStep(jobId));
}
/**
 * Phase 1 of the merge job flow: schedules creation of the target HTable
 * before any merge output is written.
 */
@Override
public void addStepPhase1_MergeDictionary(DefaultChainedExecutable jobFlow) {
    AbstractExecutable createHTable = steps.createCreateHTableStep(jobFlow.getId());
    jobFlow.addTask(createHTable);
}
/**
 * Phase 2 of the optimize job flow: creates the HTable for the recommended
 * (optimized) cuboid set.
 */
@Override
public void addStepPhase2_CreateHTable(DefaultChainedExecutable jobFlow) {
    String jobId = jobFlow.getId();
    jobFlow.addTask(steps.createCreateHTableStep(jobId, CuboidModeEnum.RECOMMEND));
}
/**
 * Appends a garbage-collection step that removes the intermediate HDFS output
 * of a cubing job: the fact-distinct-columns, HFile, and shrunken-dictionary
 * directories.
 */
public void addCubingGarbageCollectionSteps(DefaultChainedExecutable jobFlow) {
    final String jobId = jobFlow.getId();

    List<String> pathsToDelete = new ArrayList<>();
    pathsToDelete.add(getFactDistinctColumnsPath(jobId));
    pathsToDelete.add(getHFilePath(jobId));
    pathsToDelete.add(getShrunkenDictionaryPath(jobId));

    HDFSPathGarbageCollectionStep gcStep = new HDFSPathGarbageCollectionStep();
    // NOTE(review): sibling GC methods use STEP_NAME_GARBAGE_COLLECTION_HDFS even though
    // they also delete HDFS paths — confirm the HBASE-flavored name here is intentional.
    gcStep.setName(ExecutableConstants.STEP_NAME_GARBAGE_COLLECTION_HBASE);
    gcStep.setDeletePaths(pathsToDelete);
    gcStep.setJobId(jobId);
    jobFlow.addTask(gcStep);
}
/**
 * Appends a garbage-collection step that removes the optimization job's root
 * working directory on HDFS.
 */
public void addOptimizeGarbageCollectionSteps(DefaultChainedExecutable jobFlow) {
    final String jobId = jobFlow.getId();

    List<String> pathsToDelete = new ArrayList<>();
    pathsToDelete.add(getOptimizationRootPath(jobId));

    HDFSPathGarbageCollectionStep gcStep = new HDFSPathGarbageCollectionStep();
    gcStep.setName(ExecutableConstants.STEP_NAME_GARBAGE_COLLECTION_HDFS);
    gcStep.setDeletePaths(pathsToDelete);
    gcStep.setJobId(jobId);
    jobFlow.addTask(gcStep);
}
/**
 * Phase 1: materializes Hive-view-backed lookup tables into intermediate
 * tables, skipping the step entirely when there is nothing to materialize.
 */
protected void addStepPhase1_DoMaterializeLookupTable(DefaultChainedExecutable jobFlow) {
    String initStatements = JoinedFlatTable.generateHiveInitStatements(flatTableDatabase);
    String workingDir = getJobWorkingDir(jobFlow, hdfsWorkingDir);
    AbstractExecutable materializeStep = createLookupHiveViewMaterializationStep(initStatements,
            workingDir, flatDesc, hiveViewIntermediateTables, jobFlow.getId());
    // A null step means no view-backed lookup tables exist for this flat table.
    if (materializeStep == null) {
        return;
    }
    jobFlow.addTask(materializeStep);
}
protected static String getJobWorkingDir(DefaultChainedExecutable jobFlow, String hdfsWorkingDir) { String jobWorkingDir = JobBuilderSupport.getJobWorkingDir(hdfsWorkingDir, jobFlow.getId()); if (KylinConfig.getInstanceFromEnv().getHiveTableDirCreateFirst()) { // Create work dir to avoid hive create it, // the difference is that the owners are different. checkAndCreateWorkDir(jobWorkingDir); } return jobWorkingDir; }
/**
 * Appends garbage collection for a merge job: first the merge GC step, then an
 * HDFS cleanup of the merged segments' paths and the job's HFile path.
 */
public void addMergingGarbageCollectionSteps(DefaultChainedExecutable jobFlow) {
    final String jobId = jobFlow.getId();
    jobFlow.addTask(createMergeGCStep());

    List<String> pathsToDelete = new ArrayList<>(getMergingHDFSPaths());
    pathsToDelete.add(getHFilePath(jobId));

    HDFSPathGarbageCollectionStep gcStep = new HDFSPathGarbageCollectionStep();
    gcStep.setName(ExecutableConstants.STEP_NAME_GARBAGE_COLLECTION_HDFS);
    gcStep.setDeletePaths(pathsToDelete);
    gcStep.setJobId(jobId);
    jobFlow.addTask(gcStep);
}
/**
 * Appends garbage collection for a checkpoint job: first the optimize GC step,
 * then an HDFS cleanup of the optimize job's paths.
 */
public void addCheckpointGarbageCollectionSteps(DefaultChainedExecutable jobFlow) {
    final String jobId = jobFlow.getId();
    jobFlow.addTask(createOptimizeGCStep());

    List<String> pathsToDelete = new ArrayList<>(getOptimizeHDFSPaths());

    HDFSPathGarbageCollectionStep gcStep = new HDFSPathGarbageCollectionStep();
    gcStep.setName(ExecutableConstants.STEP_NAME_GARBAGE_COLLECTION_HDFS);
    gcStep.setDeletePaths(pathsToDelete);
    gcStep.setJobId(jobId);
    jobFlow.addTask(gcStep);
}
/**
 * Verifies that a chained job holding a single succeeding task runs to
 * completion and that both job and task end in SUCCEED.
 */
@Test
public void testSingleTaskJob() throws Exception {
    logger.info("testSingleTaskJob");
    DefaultChainedExecutable job = new DefaultChainedExecutable();
    BaseTestExecutable onlyTask = new SucceedTestExecutable();
    job.addTask(onlyTask);

    execMgr.addJob(job);
    waitForJobFinish(job.getId(), 10000);

    // Both the chained job and its single sub-task must finish as SUCCEED.
    Assert.assertEquals(ExecutableState.SUCCEED, execMgr.getOutput(job.getId()).getState());
    Assert.assertEquals(ExecutableState.SUCCEED, execMgr.getOutput(onlyTask.getId()).getState());
}
/**
 * Verifies that a chained job with two tasks round-trips through the service:
 * the reloaded job has the same class, task count, and contents.
 */
@Test
public void testDefaultChainedExecutable() throws Exception {
    DefaultChainedExecutable job = new DefaultChainedExecutable();
    job.addTask(new SucceedTestExecutable());
    job.addTask(new SucceedTestExecutable());
    service.addJob(job);
    assertEquals(2, job.getTasks().size());

    // Reload through the service and compare against the in-memory original.
    AbstractExecutable reloaded = service.getJob(job.getId());
    assertEquals(DefaultChainedExecutable.class, reloaded.getClass());
    assertEquals(2, ((DefaultChainedExecutable) reloaded).getTasks().size());
    assertJobEqual(job, reloaded);
}