/**
 * Appends the {@code -conf <file>} MapReduce option to {@code buf} when a
 * Hadoop job configuration file is configured for the given job type.
 *
 * @param buf     command-line buffer being assembled
 * @param jobType job conf file suffix name; may be null/empty for the default conf
 */
public void appendMapReduceParameters(StringBuilder buf, String jobType) {
    final String jobConf;
    try {
        jobConf = config.getHadoopJobConfFilePath(jobType);
    } catch (IOException e) {
        // Preserve the original cause; a failed conf lookup is treated as fatal here.
        throw new RuntimeException(e);
    }
    // StringBuilder.append cannot throw IOException, so it stays outside the try.
    if (jobConf != null && !jobConf.isEmpty()) {
        buf.append(" -conf ").append(jobConf);
    }
}
/**
 * Adds the MapReduce {@code -conf} flag (plus the resolved job conf file path)
 * to the command buffer, if one exists for this job type.
 *
 * @param buf     buffer receiving the extra command-line arguments
 * @param jobType job conf file suffix name; null/empty selects the default conf
 */
public void appendMapReduceParameters(StringBuilder buf, String jobType) {
    try {
        String confPath = config.getHadoopJobConfFilePath(jobType);
        if (confPath == null || confPath.length() == 0) {
            // Nothing configured for this job type — leave the buffer untouched.
            return;
        }
        buf.append(" -conf ").append(confPath);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Resolves the Hadoop job conf file path for the given job type.
 *
 * @param jobType job config file suffix name; if null/empty, the default job conf is used
 * @return the job config file path, or {@code ""} when no conf file can be found
 * @throws IOException if locating the conf file fails
 */
public String getHadoopJobConfFilePath(String jobType) throws IOException {
    // A non-empty jobType acts as the conf-file suffix; otherwise fall back to the default.
    String suffix = StringUtils.isEmpty(jobType) ? null : jobType;

    String path = getHadoopJobConfFilePath(suffix, true);
    if (!StringUtils.isEmpty(path)) {
        return path;
    }
    // NOTE(review): when jobType is non-empty this lookup repeats the one above
    // (suffix == jobType); it only differs when jobType is "" vs null. Kept as-is
    // to preserve behavior — confirm whether the duplicate call is intentional.
    path = getHadoopJobConfFilePath(jobType, true);
    if (!StringUtils.isEmpty(path)) {
        return path;
    }
    path = getHadoopJobConfFilePath(jobType, false);
    return StringUtils.isEmpty(path) ? "" : path;
}
// Load the default Hadoop job conf file (null job type selects the default) into this
// Configuration; wrapping it in new Path(...) makes addResource read it from the
// filesystem rather than the classpath.
conf.addResource(new Path(jobEngineConfig.getHadoopJobConfFilePath(null)));
// Load the default Hadoop job conf (null job type => default file) into this job's configuration.
job.getConfiguration().addResource(new Path(jobEngineConfig.getHadoopJobConfFilePath(null)));
// Layer Kafka-consumer-specific Hadoop settings on top; per Hadoop Configuration semantics,
// resources added later override values from earlier ones.
KafkaConsumerProperties kafkaConsumerProperties = KafkaConsumerProperties.getInstanceFromEnv();
job.getConfiguration().addResource(new Path(kafkaConsumerProperties.getKafkaConsumerHadoopJobConf()));
/**
 * Picks the Hadoop job conf file for the given realization capacity: first the
 * capacity-specific conf, then the generic fallback.
 *
 * @param capacity realization capacity used to select the conf file
 * @return the chosen conf file path, or {@code ""} when none exists
 * @throws IOException if locating the conf file fails
 */
public String getHadoopJobConfFilePath(RealizationCapacity capacity) throws IOException {
    String path = getHadoopJobConfFilePath(capacity, true);
    if (StringUtils.isEmpty(path)) {
        // Capacity-specific conf not found — retry without the capacity qualifier.
        path = getHadoopJobConfFilePath(capacity, false);
    }
    if (StringUtils.isEmpty(path)) {
        return "";
    }
    logger.info("Chosen job conf is : " + path);
    return path;
}
/**
 * Appends the {@code -conf <file>} option for the MEDIUM-capacity Hadoop job
 * conf file, when one is configured.
 *
 * @param builder      command-line buffer being assembled
 * @param engineConfig job engine configuration used to resolve the conf file
 */
private void appendMapReduceParameters(StringBuilder builder, JobEngineConfig engineConfig) {
    final String jobConf;
    try {
        jobConf = engineConfig.getHadoopJobConfFilePath(RealizationCapacity.MEDIUM);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    if (jobConf != null && !jobConf.isEmpty()) {
        builder.append(" -conf ").append(jobConf);
    }
}
/**
 * Appends the {@code -conf <file>} option for the Hadoop job conf matching this
 * segment's cube model capacity, when one is configured.
 *
 * @param builder command-line buffer being assembled
 * @param seg     cube segment whose model capacity selects the conf file
 */
private void appendMapReduceParameters(StringBuilder builder, CubeSegment seg) {
    try {
        String confFilePath = engineConfig.getHadoopJobConfFilePath(seg.getCubeDesc().getModel().getCapacity());
        if (confFilePath == null || confFilePath.isEmpty()) {
            return; // no conf file for this capacity
        }
        builder.append(" -conf ").append(confFilePath);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
// NOTE(review): this method is truncated in this view — the body continues beyond
// what is shown, so only the visible opening is documented here.
/**
 * Begins building the INSERT statement for populating the intermediate flat table.
 * Resolves the capacity-specific Hadoop job conf file as a local File — presumably
 * parsed later for job settings; confirm against the rest of the method.
 */
public static String generateInsertDataStatement(IJoinedFlatTableDesc intermediateTableDesc, String jobUUID, JobEngineConfig engineConfig) throws IOException {
    StringBuilder sql = new StringBuilder();
    File hadoopPropertiesFile = new File(engineConfig.getHadoopJobConfFilePath(intermediateTableDesc.getCapacity()));
/**
 * Resolves the Hadoop job conf file path for a realization capacity, trying the
 * capacity-specific lookup before the generic one.
 *
 * @param capacity realization capacity used to select the conf file
 * @return the chosen conf file path, or {@code ""} when nothing is found
 * @throws IOException if locating the conf file fails
 */
public String getHadoopJobConfFilePath(RealizationCapacity capacity) throws IOException {
    String chosen = getHadoopJobConfFilePath(capacity, true);
    if (!StringUtils.isEmpty(chosen)) {
        logger.info("Chosen job conf is : " + chosen);
        return chosen;
    }
    // Fall back to the non-capacity-specific conf file.
    chosen = getHadoopJobConfFilePath(capacity, false);
    if (!StringUtils.isEmpty(chosen)) {
        logger.info("Chosen job conf is : " + chosen);
        return chosen;
    }
    return "";
}
/**
 * Adds {@code -conf <file>} to the command buffer when the job type resolves to
 * a Hadoop job conf file.
 *
 * @param buf     buffer receiving the extra command-line arguments
 * @param jobType job conf file suffix name; null/empty selects the default conf
 */
public void appendMapReduceParameters(StringBuilder buf, String jobType) {
    String jobConfPath;
    try {
        jobConfPath = config.getHadoopJobConfFilePath(jobType);
    } catch (IOException ioe) {
        throw new RuntimeException(ioe);
    }
    boolean hasConf = jobConfPath != null && !jobConfPath.isEmpty();
    if (hasConf) {
        buf.append(" -conf ").append(jobConfPath);
    }
}
/**
 * Appends the MapReduce {@code -conf} option and the resolved job conf path to
 * the buffer, doing nothing when no conf file is configured.
 *
 * @param buf     command-line buffer being assembled
 * @param jobType job conf file suffix name; null/empty selects the default conf
 */
public void appendMapReduceParameters(StringBuilder buf, String jobType) {
    try {
        String hadoopJobConf = config.getHadoopJobConfFilePath(jobType);
        if (hadoopJobConf == null || hadoopJobConf.isEmpty()) {
            return;
        }
        buf.append(" -conf ");
        buf.append(hadoopJobConf);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Appends {@code -conf <file>} for the MEDIUM-capacity Hadoop job conf file to
 * the command buffer, when one exists.
 *
 * @param builder      command-line buffer being assembled
 * @param engineConfig job engine configuration used to resolve the conf file
 */
private void appendMapReduceParameters(StringBuilder builder, JobEngineConfig engineConfig) {
    try {
        String mediumJobConf = engineConfig.getHadoopJobConfFilePath(RealizationCapacity.MEDIUM);
        if (mediumJobConf == null || mediumJobConf.length() == 0) {
            return; // nothing configured — leave the buffer untouched
        }
        builder.append(" -conf ").append(mediumJobConf);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
/**
 * Resolves the Hadoop job conf file path for the given job type.
 *
 * @param jobType job config file suffix name; if null/empty, the default job conf is used
 * @return the job config file path, or {@code ""} when no conf file can be found
 * @throws IOException if locating the conf file fails
 */
public String getHadoopJobConfFilePath(String jobType) throws IOException {
    String suffix = null;
    if (!StringUtils.isEmpty(jobType)) {
        suffix = jobType;
    }
    // Try the suffix-qualified lookup first, then fall back step by step.
    String result = getHadoopJobConfFilePath(suffix, true);
    if (StringUtils.isEmpty(result)) {
        // NOTE(review): identical to the call above whenever jobType is non-empty
        // (suffix == jobType); kept to preserve the original lookup order.
        result = getHadoopJobConfFilePath(jobType, true);
    }
    if (StringUtils.isEmpty(result)) {
        result = getHadoopJobConfFilePath(jobType, false);
    }
    if (StringUtils.isEmpty(result)) {
        result = "";
    }
    return result;
}
/**
 * Appends {@code -conf <file>} for the Hadoop job conf selected by this
 * segment's cube model capacity, when one is configured.
 *
 * @param builder command-line buffer being assembled
 * @param seg     cube segment whose model capacity selects the conf file
 */
private void appendMapReduceParameters(StringBuilder builder, CubeSegment seg) {
    final String jobConf;
    try {
        jobConf = engineConfig.getHadoopJobConfFilePath(seg.getCubeDesc().getModel().getCapacity());
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    if (jobConf != null && !jobConf.isEmpty()) {
        builder.append(" -conf ").append(jobConf);
    }
}
// NOTE(review): this method is truncated in this view — the body continues beyond
// what is shown, so only the visible opening is documented here.
/**
 * Begins building the INSERT statement for populating the intermediate flat table.
 * Resolves the capacity-specific Hadoop job conf file as a local File — presumably
 * parsed later for job settings; confirm against the rest of the method.
 */
public static String generateInsertDataStatement(IJoinedFlatTableDesc intermediateTableDesc, String jobUUID, JobEngineConfig engineConfig) throws IOException {
    StringBuilder sql = new StringBuilder();
    File hadoopPropertiesFile = new File(engineConfig.getHadoopJobConfFilePath(intermediateTableDesc.getCapacity()));
// NOTE(review): passing the path as a String makes Hadoop's Configuration.addResource
// resolve it on the CLASSPATH, unlike the sibling call sites that wrap it in new Path(...)
// (filesystem lookup). Confirm this difference is intentional — a filesystem path passed
// here would silently fail to load.
conf.addResource(jobEngineConfig.getHadoopJobConfFilePath(null));
// Load the default Hadoop job conf file (null job type selects the default) into this
// Configuration; the new Path(...) wrapper makes addResource read it from the
// filesystem rather than the classpath.
conf.addResource(new Path(jobEngineConfig.getHadoopJobConfFilePath(null)));
// Load the default Hadoop job conf (null job type => default file) into this job's configuration.
job.getConfiguration().addResource(new Path(jobEngineConfig.getHadoopJobConfFilePath(null)));
// Layer Kafka-consumer-specific Hadoop settings on top; per Hadoop Configuration semantics,
// resources added later override values from earlier ones.
KafkaConsumerProperties kafkaConsumerProperties = KafkaConsumerProperties.getInstanceFromEnv();
job.getConfiguration().addResource(new Path(kafkaConsumerProperties.getKafkaConsumerHadoopJobConf()));