/**
 * Sleeps for {@code sleepTime} seconds (waking once per second), then delegates to the
 * parent {@code run()}. If the sleep is interrupted, the interrupt flag is restored and
 * the interruption is rethrown as an unchecked exception.
 */
@Override
public void run() {
  try {
    // Multiply with a long literal so the arithmetic is done in long even if sleepTime is an int.
    long endTime = System.currentTimeMillis() + sleepTime * 1000L;
    while (System.currentTimeMillis() <= endTime) {
      Thread.sleep(1000L);
      // Emitted once per second until the deadline passes; kept at WARN to match existing behavior.
      log.warn("Sleeping for {} seconds", sleepTime);
    }
    log.info("Hello World!");
    super.run();
  } catch (InterruptedException e) {
    log.error("Sleep interrupted.");
    // Restore the interrupt flag so callers up the stack can observe the interruption.
    Thread.currentThread().interrupt();
    // Throwables.propagate is deprecated; Guava's guidance is to wrap in RuntimeException directly.
    throw new RuntimeException(e);
  }
}
}
/**
 * Executes this task's publish queries and then performs the standard commit.
 * Failures are recorded by flipping the working state to FAILED rather than propagating.
 */
@Override
public void commit() {
  try {
    executePublishQueries(generatePublishQueries());
    super.commit();
  } catch (Exception e) {
    // Swallow and mark FAILED so the framework observes the state change instead of an exception.
    log.error("Exception in HiveTask generate publish HiveQueries ", e);
    this.workingState = WorkUnitState.WorkingState.FAILED;
  }
}
}
/**
 * Cancels this task if it has not yet completed: PENDING or RUNNING transitions to
 * CANCELLED; any other (terminal) state is left untouched.
 */
@Override
public void shutdown() {
  WorkUnitState.WorkingState current = getWorkingState();
  boolean stillActive = current == WorkUnitState.WorkingState.PENDING
      || current == WorkUnitState.WorkingState.RUNNING;
  if (stillActive) {
    this.workingState = WorkUnitState.WorkingState.CANCELLED;
  }
}
/**
 * Returns the parent's persistent state augmented with this task's id under
 * the {@code PERSISTENT_STATE} property key.
 */
@Override
public State getPersistentState() {
  State persistentState = super.getPersistentState();
  persistentState.setProp(PERSISTENT_STATE, this.taskId);
  return persistentState;
}
/**
 * Returns the parent's execution metadata augmented with this task's id under
 * the {@code EXECUTION_METADATA} property key.
 */
@Override
public State getExecutionMetadata() {
  State metadata = super.getExecutionMetadata();
  metadata.setProp(EXECUTION_METADATA, this.taskId);
  return metadata;
}
}
/**
 * Generates and runs this task's Hive queries: auxiliary files and jars are registered
 * first, followed by any setup statements, then the generated queries, and finally the
 * parent {@code run()}. Failures are captured by marking the task FAILED.
 */
@Override
public void run() {
  try {
    List<String> hiveQueries = generateHiveQueries();
    // Register auxiliary resources before any query executes.
    String[] addFileStatements = Lists.transform(this.addFiles, file -> "ADD FILE " + file).toArray(new String[0]);
    String[] addJarStatements = Lists.transform(this.addJars, file -> "ADD JAR " + file).toArray(new String[0]);
    this.hiveJdbcConnector.executeStatements(addFileStatements);
    this.hiveJdbcConnector.executeStatements(addJarStatements);
    this.hiveJdbcConnector.executeStatements(this.setupQueries.toArray(new String[0]));
    this.hiveJdbcConnector.executeStatements(hiveQueries.toArray(new String[0]));
    super.run();
  } catch (Exception e) {
    log.error("Exception in HiveTask generateHiveQueries ", e);
    this.workingState = WorkUnitState.WorkingState.FAILED;
  }
}
/**
 * Posts a COMMIT_EVENT for this task (when an event bus is wired in) and then
 * performs the standard commit.
 */
@Override
public void commit() {
  if (this.eventBus != null) {
    // NOTE(review): assumes taskId always parses as an integer — confirm against how taskId is assigned.
    Event commitEvent = new Event(COMMIT_EVENT, Integer.parseInt(this.taskId));
    this.eventBus.post(commitEvent);
  }
  super.commit();
}
/**
 * Reports progress as the textual form of the current working state.
 */
@Override
public String getProgress() {
  WorkUnitState.WorkingState currentState = getWorkingState();
  return currentState.toString();
}
/**
 * Posts a RUN_EVENT for this task (when an event bus is wired in) and then
 * delegates to the parent {@code run()}.
 */
@Override
public void run() {
  if (this.eventBus != null) {
    // NOTE(review): assumes taskId always parses as an integer — confirm against how taskId is assigned.
    Event runEvent = new Event(RUN_EVENT, Integer.parseInt(this.taskId));
    this.eventBus.post(runEvent);
  }
  super.run();
}
/**
 * Runs the generated publish queries and then commits via the parent implementation.
 * Any exception is logged and the working state is set to FAILED instead of rethrowing.
 */
@Override
public void commit() {
  try {
    executePublishQueries(generatePublishQueries());
    super.commit();
  } catch (Exception e) {
    // Record the failure through task state; the framework reacts to FAILED, not exceptions here.
    log.error("Exception in HiveTask generate publish HiveQueries ", e);
    this.workingState = WorkUnitState.WorkingState.FAILED;
  }
}
}
/**
 * Moves a not-yet-finished task (PENDING or RUNNING) into the CANCELLED state;
 * a task already in a terminal state is unaffected.
 */
@Override
public void shutdown() {
  WorkUnitState.WorkingState state = getWorkingState();
  if (state == WorkUnitState.WorkingState.PENDING || state == WorkUnitState.WorkingState.RUNNING) {
    this.workingState = WorkUnitState.WorkingState.CANCELLED;
  }
}
/**
 * Executes this task's Hive work in order: ADD FILE statements, ADD JAR statements,
 * setup queries, the generated queries, and finally the parent {@code run()}.
 * On any failure the task is marked FAILED rather than propagating the exception.
 */
@Override
public void run() {
  try {
    List<String> generatedQueries = generateHiveQueries();
    // Auxiliary files and jars must be registered before the queries run.
    this.hiveJdbcConnector.executeStatements(
        Lists.transform(this.addFiles, file -> "ADD FILE " + file).toArray(new String[0]));
    this.hiveJdbcConnector.executeStatements(
        Lists.transform(this.addJars, file -> "ADD JAR " + file).toArray(new String[0]));
    this.hiveJdbcConnector.executeStatements(this.setupQueries.toArray(new String[0]));
    this.hiveJdbcConnector.executeStatements(generatedQueries.toArray(new String[0]));
    super.run();
  } catch (Exception e) {
    log.error("Exception in HiveTask generateHiveQueries ", e);
    this.workingState = WorkUnitState.WorkingState.FAILED;
  }
}
/**
 * Returns the current working state rendered as a string, serving as this task's
 * progress indicator.
 */
@Override
public String getProgress() {
  WorkUnitState.WorkingState state = getWorkingState();
  return state.toString();
}