/**
 * Shared support state for assembling build jobs over one cube segment.
 *
 * @param seg       segment whose KylinConfig drives the job engine settings; must not be null
 * @param submitter user who triggered the job
 */
public JobBuilderSupport(CubeSegment seg, String submitter) {
    Preconditions.checkNotNull(seg, "segment cannot be null");
    this.seg = seg;
    this.submitter = submitter;
    this.config = new JobEngineConfig(seg.getConfig());
}
/**
 * Builds MR steps for HBase lookup tables, bound to the given cube's
 * job-engine configuration.
 */
public HBaseLookupMRSteps(CubeInstance cube) {
    this.config = new JobEngineConfig(cube.getConfig());
    this.cube = cube;
}
// Drops intermediate Hive tables left behind by finished jobs.
// NOTE(review): fragment — the method body continues beyond this excerpt.
private void deleteHiveTables(List<String> allHiveTablesNeedToBeDeleted, Map<String, String> segmentId2JobId) throws IOException {
    final JobEngineConfig engineConfig = new JobEngineConfig(config);
    // 36 = length of a canonical UUID string; presumably used to extract job ids
    // from intermediate table names — TODO confirm against the rest of the method.
    final int uuidLength = 36;
/**
 * Deletes the configured HDFS paths on the default working cluster and, when a
 * separate HBase cluster file system is configured, on that cluster as well.
 *
 * @return SUCCEED with the accumulated output, or ERROR carrying the IOException
 */
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    try {
        config = new JobEngineConfig(context.getConfig());
        List<String> pathsToDelete = getDeletePaths();
        // Always clean the default working file system first.
        dropHdfsPathOnCluster(pathsToDelete, HadoopUtil.getWorkingFileSystem());
        // A non-empty HBase cluster FS means storage lives on a second cluster; clean it too.
        if (StringUtils.isNotEmpty(context.getConfig().getHBaseClusterFs())) {
            dropHdfsPathOnCluster(pathsToDelete, FileSystem.get(HBaseConnection.getCurrentHBaseConfiguration()));
        }
    } catch (IOException e) {
        logger.error("job:" + getId() + " execute finished with exception", e);
        output.append("\n").append(e.getLocalizedMessage());
        return new ExecuteResult(ExecuteResult.State.ERROR, output.toString(), e);
    }
    return new ExecuteResult(ExecuteResult.State.SUCCEED, output.toString());
}
/**
 * Spring lifecycle hook: pins the JVM default time zone to the configured one,
 * initializes the job scheduler, and registers a shutdown hook to stop it cleanly.
 */
@SuppressWarnings("unchecked")
@Override
public void afterPropertiesSet() throws Exception {
    // NOTE: TimeZone.setDefault is JVM-wide and affects all date handling in this process.
    TimeZone tz = TimeZone.getTimeZone(getConfig().getTimeZone());
    TimeZone.setDefault(tz);

    final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
    final Scheduler<AbstractExecutable> scheduler = (Scheduler<AbstractExecutable>) SchedulerFactory
            .scheduler(kylinConfig.getSchedulerType());
    scheduler.init(new JobEngineConfig(kylinConfig), new ZookeeperJobLock());

    // Ensure the scheduler is shut down when the JVM exits.
    Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                scheduler.shutdown();
            } catch (SchedulerException e) {
                logger.error("error occurred to shutdown scheduler", e);
            }
        }
    }));
}
// Fragment: overlay the job-engine's Hadoop job conf file onto this Configuration.
// The null argument presumably selects the default (no-capacity) conf profile — TODO confirm.
JobEngineConfig jobEngineConfig = new JobEngineConfig(kylinConfig);
conf.addResource(new Path(jobEngineConfig.getHadoopJobConfFilePath(null)));
// Fragment: overlay the job-engine Hadoop conf onto the MR job's configuration,
// then load Kafka consumer settings from the environment for streaming input.
JobEngineConfig jobEngineConfig = new JobEngineConfig(KylinConfig.getInstanceFromEnv());
job.getConfiguration().addResource(new Path(jobEngineConfig.getHadoopJobConfFilePath(null)));
KafkaConsumerProperties kafkaConsumerProperties = KafkaConsumerProperties.getInstanceFromEnv();
// Collects HDFS files no longer referenced by any cube into the collector.
// NOTE(review): fragment — the method body continues beyond this excerpt.
private void cleanUnusedHdfsFiles(FileSystem fs, UnusedHdfsFileCollector collector) throws IOException {
    final JobEngineConfig engineConfig = new JobEngineConfig(config);
    final CubeManager cubeMgr = CubeManager.getInstance(config);
// Deletes HDFS files no longer referenced by any cube.
// NOTE(review): fragment — the method body continues beyond this excerpt.
private void cleanUnusedHdfsFiles(Configuration conf) throws IOException {
    JobEngineConfig engineConfig = new JobEngineConfig(KylinConfig.getInstanceFromEnv());
    CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
/**
 * Captures everything the Hive-side flat-table steps need for one cube segment:
 * job-engine conf, segment config, flat table description, and cube metadata.
 */
public BaseBatchCubingInputSide(CubeSegment seg, IJoinedFlatTableDesc flatDesc) {
    this.seg = seg;
    this.flatDesc = flatDesc;
    this.conf = new JobEngineConfig(KylinConfig.getInstanceFromEnv());
    // config must be assigned before deriving the intermediate-table database from it
    this.config = seg.getConfig();
    this.hiveTableDatabase = this.config.getHiveDatabaseForIntermediateTable();
    this.cubeDesc = seg.getCubeDesc();
    this.cubeName = seg.getCubeInstance().getName();
}
/**
 * Boots the default scheduler with a mock job lock and fails fast if it
 * did not come up.
 *
 * @throws SchedulerException if scheduler initialization fails
 */
protected void startScheduler() throws SchedulerException {
    scheduler = DefaultScheduler.createInstance();
    scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()), new MockJobLock());
    if (scheduler.hasStarted()) {
        return;
    }
    throw new RuntimeException("scheduler has not been started");
}
// Verifies that JobEngineConfig observes hot-reloaded kylin.properties: the
// wrapped KylinConfig instance is shared, so reloadFromSiteProperties should be
// visible through the existing JobEngineConfig without re-creating it.
@Test
public void testPropertiesHotLoad() throws IOException {
    KylinConfig baseConfig = KylinConfig.getInstanceFromEnv();
    JobEngineConfig jobEngineConfig = new JobEngineConfig(baseConfig);
    // baseline value from the checked-in test properties
    assertEquals(10, jobEngineConfig.getMaxConcurrentJobLimit());
    updateProperty("kylin.job.max-concurrent-jobs", "20");
    KylinConfig.getInstanceFromEnv().reloadFromSiteProperties();
    assertEquals(20, jobEngineConfig.getMaxConcurrentJobLimit());
}
// closes the enclosing test class (class header is outside this excerpt)
}
// Deletes HDFS files no longer referenced by any cube.
// NOTE(review): fragment — the method body continues beyond this excerpt.
private void cleanUnusedHdfsFiles(Configuration conf) throws IOException {
    JobEngineConfig engineConfig = new JobEngineConfig(KylinConfig.getInstanceFromEnv());
    CubeManager cubeMgr = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
// Fragment: build the DROP TABLE HQL for this job's II intermediate flat table.
JobEngineConfig engineConfig = new JobEngineConfig(config);
IJoinedFlatTableDesc intermediateTableDesc = new IIJoinedFlatTableDesc(iidesc);
String dropTableHql = JoinedFlatTable.generateDropTableStatement(intermediateTableDesc, jobUUID);
/**
 * Asserts that the generated INSERT statement for the flat table has the
 * expected length — a cheap proxy for the statement text being stable.
 */
@Test
public void testGenerateInsertSql() throws IOException {
    JobEngineConfig engineConfig = new JobEngineConfig(KylinConfig.getInstanceFromEnv());
    String sqls = JoinedFlatTable.generateInsertDataStatement(intermediateTableDesc, fakeJobUUID, engineConfig);
    System.out.println(sqls);
    assertEquals(1155, sqls.length());
}
/**
 * Test fixture: creates test metadata, shortens the scheduler poll interval,
 * and starts the default scheduler, failing fast if it did not come up.
 */
@Before
public void setup() throws Exception {
    createTestMetadata();
    // Poll every 10s instead of the production default so tests finish quickly.
    setFinalStatic(ExecutableConstants.class.getField("DEFAULT_SCHEDULER_INTERVAL_SECONDS"), 10);
    jobService = ExecutableManager.getInstance(KylinConfig.getInstanceFromEnv());
    scheduler = DefaultScheduler.getInstance();
    scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()));
    if (scheduler.hasStarted()) {
        return;
    }
    throw new RuntimeException("scheduler has not been started");
}
/**
 * Sandbox test fixture: deploys metadata, starts the scheduler, and purges
 * any leftover cubing jobs so each run starts from a clean slate.
 */
@Before
public void before() throws Exception {
    HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA);
    DeployUtil.initCliWorkDir();
    DeployUtil.deployMetadata();
    DeployUtil.overrideJobJarLocations();

    final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
    jobService = ExecutableManager.getInstance(kylinConfig);
    scheduler = DefaultScheduler.getInstance();
    scheduler.init(new JobEngineConfig(kylinConfig));
    if (!scheduler.hasStarted()) {
        throw new RuntimeException("scheduler has not been started");
    }
    cubeManager = CubeManager.getInstance(kylinConfig);
    jobEngineConfig = new JobEngineConfig(kylinConfig);

    // Remove stale cubing jobs left over from previous runs.
    for (String id : jobService.getAllJobIds()) {
        if (jobService.getJob(id) instanceof CubingJob) {
            jobService.deleteJob(id);
        }
    }
}
/**
 * Builds job-support state for the given segment; rejects a null segment.
 *
 * @param seg       non-null segment to build jobs for
 * @param submitter user who submitted the job
 */
public JobBuilderSupport(CubeSegment seg, String submitter) {
    Preconditions.checkNotNull(seg, "segment cannot be null");
    this.config = new JobEngineConfig(seg.getConfig());
    this.submitter = submitter;
    this.seg = seg;
}
@Before public void before() throws Exception { HBaseMetadataTestCase.staticCreateTestMetadata(AbstractKylinTestCase.SANDBOX_TEST_DATA); DeployUtil.initCliWorkDir(); // DeployUtil.deployMetadata(); DeployUtil.overrideJobJarLocations(); final KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv(); jobService = ExecutableManager.getInstance(kylinConfig); scheduler = DefaultScheduler.getInstance(); scheduler.init(new JobEngineConfig(kylinConfig)); if (!scheduler.hasStarted()) { throw new RuntimeException("scheduler has not been started"); } iiManager = IIManager.getInstance(kylinConfig); jobEngineConfig = new JobEngineConfig(kylinConfig); for (String jobId : jobService.getAllJobIds()) { if(jobService.getJob(jobId) instanceof IIJob){ jobService.deleteJob(jobId); } } IIInstance ii = iiManager.getII(TEST_II_NAME); if (ii.getStatus() != RealizationStatusEnum.DISABLED) { ii.setStatus(RealizationStatusEnum.DISABLED); iiManager.updateII(ii); } }
/**
 * Input-side context for batch cubing: derives Hive and cube metadata from
 * the given segment and binds the global job-engine configuration.
 */
public BaseBatchCubingInputSide(CubeSegment seg, IJoinedFlatTableDesc flatDesc) {
    this.conf = new JobEngineConfig(KylinConfig.getInstanceFromEnv());
    this.flatDesc = flatDesc;
    this.seg = seg;
    // assign config before reading the intermediate-table database off it
    this.config = seg.getConfig();
    this.hiveTableDatabase = config.getHiveDatabaseForIntermediateTable();
    this.cubeDesc = seg.getCubeDesc();
    this.cubeName = seg.getCubeInstance().getName();
}