/**
 * Hive-side bootstrap: validates the id/operation configuration, installs the
 * Hive field extractor when the user has not supplied one, and discovers the
 * Elasticsearch version up front.
 */
static void init(Settings settings, Log log) {
    // fail fast on an invalid id/operation combination before any other wiring
    InitializationUtils.checkIdForOperation(settings);
    InitializationUtils.setFieldExtractorIfNotSet(settings, HiveFieldExtractor.class, log);
    InitializationUtils.discoverEsVersion(settings, log);
}
private void init(Configuration cfg) throws IOException { Settings settings = HadoopSettingsManager.loadFrom(cfg); Assert.hasText(settings.getResourceWrite(), String.format("No resource ['%s'] (index/query/location) specified", ES_RESOURCE)); // Need to discover the ESVersion before checking if index exists. InitializationUtils.discoverEsVersion(settings, log); InitializationUtils.checkIdForOperation(settings); InitializationUtils.checkIndexExistence(settings); if (HadoopCfgUtils.getReduceTasks(cfg) != null) { if (HadoopCfgUtils.getSpeculativeReduce(cfg)) { log.warn("Speculative execution enabled for reducer - consider disabling it to prevent data corruption"); } } else { if (HadoopCfgUtils.getSpeculativeMap(cfg)) { log.warn("Speculative execution enabled for mapper - consider disabling it to prevent data corruption"); } } //log.info(String.format("Starting to write/index to [%s][%s]", settings.getTargetUri(), settings.getTargetResource())); } }
/**
 * Pig-side bootstrap: merges the job configuration with the connector
 * properties, binds the given location as the read or write resource and wires
 * the Pig-specific value/byte converters and field extractor.
 *
 * @param location index/resource the storage points at
 * @param job      Hadoop job supplying the base configuration
 * @param read     true to bind {@code location} as the read resource,
 *                 false to bind it as the write resource
 */
private void init(String location, Job job, boolean read) {
    Settings merged = HadoopSettingsManager.loadFrom(job.getConfiguration()).merge(properties);
    if (read) {
        merged = merged.setResourceRead(location);
    }
    else {
        merged = merged.setResourceWrite(location);
    }

    InitializationUtils.checkIdForOperation(merged);
    InitializationUtils.setValueWriterIfNotSet(merged, PigValueWriter.class, log);
    InitializationUtils.setValueReaderIfNotSet(merged, PigValueReader.class, log);
    InitializationUtils.setBytesConverterIfNeeded(merged, PigBytesConverter.class, log);
    InitializationUtils.setFieldExtractorIfNotSet(merged, PigFieldExtractor.class, log);

    // remember whether output is already JSON so serialization can skip conversion
    isJSON = merged.getOutputAsJson();
}
/**
 * Shared Hive initialization routine.
 *
 * Checks that the configured id is valid for the requested operation, falls
 * back to {@link HiveFieldExtractor} if no field extractor was configured and
 * performs Elasticsearch version discovery.
 */
static void init(Settings settings, Log log) {
    InitializationUtils.checkIdForOperation(settings);
    // only installs the extractor when the user did not configure one
    InitializationUtils.setFieldExtractorIfNotSet(settings, HiveFieldExtractor.class, log);
    InitializationUtils.discoverEsVersion(settings, log);
}
private void init(Configuration cfg) throws IOException { Settings settings = HadoopSettingsManager.loadFrom(cfg); Assert.hasText(settings.getResourceWrite(), String.format("No resource ['%s'] (index/query/location) specified", ES_RESOURCE)); // lazy-init RestRepository client = null; InitializationUtils.checkIdForOperation(settings); InitializationUtils.checkIndexExistence(settings, client); if (HadoopCfgUtils.getReduceTasks(cfg) != null) { if (HadoopCfgUtils.getSpeculativeReduce(cfg)) { log.warn("Speculative execution enabled for reducer - consider disabling it to prevent data corruption"); } } else { if (HadoopCfgUtils.getSpeculativeMap(cfg)) { log.warn("Speculative execution enabled for mapper - consider disabling it to prevent data corruption"); } } //log.info(String.format("Starting to write/index to [%s][%s]", settings.getTargetUri(), settings.getTargetResource())); } }
private void init(Configuration cfg) throws IOException { Settings settings = HadoopSettingsManager.loadFrom(cfg); Assert.hasText(settings.getResourceWrite(), String.format("No resource ['%s'] (index/query/location) specified", ES_RESOURCE)); // Need to discover the ESVersion before checking if index exists. InitializationUtils.discoverEsVersion(settings, log); InitializationUtils.checkIdForOperation(settings); InitializationUtils.checkIndexExistence(settings); if (HadoopCfgUtils.getReduceTasks(cfg) != null) { if (HadoopCfgUtils.getSpeculativeReduce(cfg)) { log.warn("Speculative execution enabled for reducer - consider disabling it to prevent data corruption"); } } else { if (HadoopCfgUtils.getSpeculativeMap(cfg)) { log.warn("Speculative execution enabled for mapper - consider disabling it to prevent data corruption"); } } //log.info(String.format("Starting to write/index to [%s][%s]", settings.getTargetUri(), settings.getTargetResource())); } }
private void init(Configuration cfg) throws IOException { Settings settings = HadoopSettingsManager.loadFrom(cfg); Assert.hasText(settings.getResourceWrite(), String.format("No resource ['%s'] (index/query/location) specified", ES_RESOURCE)); // Need to discover the ESVersion before checking if index exists. InitializationUtils.discoverEsVersion(settings, log); InitializationUtils.checkIdForOperation(settings); InitializationUtils.checkIndexExistence(settings); if (HadoopCfgUtils.getReduceTasks(cfg) != null) { if (HadoopCfgUtils.getSpeculativeReduce(cfg)) { log.warn("Speculative execution enabled for reducer - consider disabling it to prevent data corruption"); } } else { if (HadoopCfgUtils.getSpeculativeMap(cfg)) { log.warn("Speculative execution enabled for mapper - consider disabling it to prevent data corruption"); } } //log.info(String.format("Starting to write/index to [%s][%s]", settings.getTargetUri(), settings.getTargetResource())); } }
private void init(Configuration cfg) throws IOException { Settings settings = HadoopSettingsManager.loadFrom(cfg); Assert.hasText(settings.getResourceWrite(), String.format("No resource ['%s'] (index/query/location) specified", ES_RESOURCE)); // Need to discover the ESVersion before checking if index exists. InitializationUtils.discoverEsVersion(settings, log); InitializationUtils.checkIdForOperation(settings); InitializationUtils.checkIndexExistence(settings); if (HadoopCfgUtils.getReduceTasks(cfg) != null) { if (HadoopCfgUtils.getSpeculativeReduce(cfg)) { log.warn("Speculative execution enabled for reducer - consider disabling it to prevent data corruption"); } } else { if (HadoopCfgUtils.getSpeculativeMap(cfg)) { log.warn("Speculative execution enabled for mapper - consider disabling it to prevent data corruption"); } } //log.info(String.format("Starting to write/index to [%s][%s]", settings.getTargetUri(), settings.getTargetResource())); } }
/**
 * Initializes the Pig storage: merges the job configuration with the storage
 * properties, binds the location for reading or writing, validates the id
 * settings and installs the Pig-specific writers, readers and extractors.
 *
 * @param location resource (index/query/location) to bind
 * @param job      Hadoop job whose configuration is merged with {@code properties}
 * @param read     whether {@code location} is the read resource (true) or the
 *                 write resource (false)
 */
private void init(String location, Job job, boolean read) {
    Settings cfg = HadoopSettingsManager.loadFrom(job.getConfiguration()).merge(properties);
    cfg = read ? cfg.setResourceRead(location) : cfg.setResourceWrite(location);

    InitializationUtils.checkIdForOperation(cfg);

    // Pig-specific serialization plumbing (only set when not user-configured)
    InitializationUtils.setValueWriterIfNotSet(cfg, PigValueWriter.class, log);
    InitializationUtils.setValueReaderIfNotSet(cfg, PigValueReader.class, log);
    InitializationUtils.setBytesConverterIfNeeded(cfg, PigBytesConverter.class, log);
    InitializationUtils.setFieldExtractorIfNotSet(cfg, PigFieldExtractor.class, log);

    isJSON = cfg.getOutputAsJson();
}