/**
 * Reads the key/value lengths and the multi-table flag from the job
 * configuration, then selects the table set this mapper writes to.
 */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  Configuration configuration = context.getConfiguration();
  keyLength = configuration.getInt(KEYLEN_CONF, KEYLEN_DEFAULT);
  valLength = configuration.getInt(VALLEN_CONF, VALLEN_DEFAULT);
  multiTableMapper =
      configuration.getBoolean(HFileOutputFormat2.MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY, false);
  // Fan out to every known table, or restrict output to the first one only.
  tables = multiTableMapper ? TABLE_NAMES : new TableName[] { TABLE_NAMES[0] };
}
/**
 * Pulls key/value sizing parameters and the multi-table switch out of the
 * configuration; decides whether this task targets all tables or just one.
 */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  final Configuration jobConf = context.getConfiguration();
  keyLength = jobConf.getInt(KEYLEN_CONF, KEYLEN_DEFAULT);
  valLength = jobConf.getInt(VALLEN_CONF, VALLEN_DEFAULT);
  multiTableMapper =
      jobConf.getBoolean(HFileOutputFormat2.MULTI_TABLE_HFILEOUTPUTFORMAT_CONF_KEY, false);
  if (!multiTableMapper) {
    // Single-table mode: only the first configured table receives output.
    tables = new TableName[] { TABLE_NAMES[0] };
  } else {
    tables = TABLE_NAMES;
  }
}
/**
 * Builds the top-level shared-resources broker from the job configuration.
 * The broker is created before delegating to the parent setup (original
 * ordering preserved).
 */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  Map<String, String> brokerKeys = Maps.newHashMap();
  SharedResourcesBrokerFactory.addBrokerKeys(brokerKeys, context.getConfiguration());
  this.broker =
      SharedResourcesBrokerFactory.createDefaultTopLevelBroker(
          ConfigFactory.parseMap(brokerKeys), SimpleScopeType.GLOBAL.defaultScopeInstance());
  super.setup(context);
}
/** {@inheritDoc} */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  // Record that setup ran, then fire the fault-injection hook.
  wasSetUp = true;
  HadoopErrorSimulator.instance().onMapSetup();
}
/**
 * Initializes the scan job: resolves the Hadoop and Titan configurations,
 * creates the scan metrics wrapper, and finishes job-specific setup.
 */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  final org.apache.hadoop.conf.Configuration hadoopConfiguration =
      DEFAULT_COMPAT.getContextConfiguration(context);
  final ModifiableHadoopConfiguration scanConfiguration =
      ModifiableHadoopConfiguration.of(TitanHadoopConfiguration.MAPRED_NS, hadoopConfiguration);
  job = getJob(scanConfiguration);
  metrics = new HadoopContextScanMetrics(context);
  finishSetup(scanConfiguration, getTitanConfiguration(context));
}
/**
 * Instantiates and configures the pcap filter and reads the inclusive
 * start/end timestamps from the job configuration.
 */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  // Hoist the configuration object used by every lookup below.
  final Configuration conf = context.getConfiguration();
  filter = PcapFilters.valueOf(conf.get(PcapFilterConfigurator.PCAP_FILTER_NAME_CONF)).create();
  filter.configure(conf);
  // Timestamps are stored as unsigned decimal strings.
  start = Long.parseUnsignedLong(conf.get(START_TS_CONF));
  end = Long.parseUnsignedLong(conf.get(END_TS_CONF));
}
/**
 * Opens the Phoenix output connection and prepares the upsert statement
 * used to write rows into the physical index table.
 *
 * <p>For non-transactional builds (no TX SCN configured) the index build is
 * pinned to the current SCN via {@code BUILD_INDEX_AT_ATTRIB}.
 *
 * @throws RuntimeException wrapping any {@link SQLException} raised while
 *         creating the connection or preparing the statement
 */
@Override
protected void setup(final Context context) throws IOException, InterruptedException {
  super.setup(context);
  final Configuration configuration = context.getConfiguration();
  try {
    indxTblColumnMetadata = PhoenixConfigurationUtil.getUpsertColumnMetadataList(configuration);
    indxWritable.setColumnMetadata(indxTblColumnMetadata);
    preUpdateProcessor = PhoenixConfigurationUtil.loadPreUpsertProcessor(configuration);
    indexTableName = PhoenixConfigurationUtil.getPhysicalTableName(configuration);
    final Properties overrideProps = new Properties();
    String scn = configuration.get(PhoenixConfigurationUtil.CURRENT_SCN_VALUE);
    String txScnValue = configuration.get(PhoenixConfigurationUtil.TX_SCN_VALUE);
    if (txScnValue == null) {
      // Non-transactional path: build the index as of the current SCN.
      // NOTE(review): scn may be null here, which makes Properties.put throw
      // NPE exactly as the original code did — confirm CURRENT_SCN_VALUE is
      // always set by the job driver.
      overrideProps.put(PhoenixRuntime.BUILD_INDEX_AT_ATTRIB, scn);
    }
    connection = ConnectionUtil.getOutputConnection(configuration, overrideProps);
    connection.setAutoCommit(false);
    final String upsertQuery = PhoenixConfigurationUtil.getUpsertStatement(configuration);
    this.pStatement = connection.prepareStatement(upsertQuery);
  } catch (SQLException e) {
    // Preserve the full cause chain; the original wrapped only e.getMessage(),
    // losing the stack trace and SQL state of the underlying failure.
    throw new RuntimeException(e);
  }
}
// Creates the HBase table writer from the job configuration.
// NOTE(review): this method appears truncated in this chunk — its closing
// brace(s) are missing; confirm the remainder against the full source file.
@Override protected void setup(final Context context) throws IOException, InterruptedException { super.setup(context); final Configuration configuration = context.getConfiguration(); writer = new DirectHTableWriter(configuration);
// Begins task setup from the job configuration.
// NOTE(review): this method is cut off after the opening `try {` in this
// chunk — the try body, catch clause, and closing braces are not visible;
// confirm against the full source file.
@Override protected void setup(final Context context) throws IOException, InterruptedException { super.setup(context); final Configuration configuration = context.getConfiguration(); try {
/** Caches the trace-logging flag once per task so later checks are a plain boolean read. */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  tracing = LOG.isTraceEnabled();
}
/** Snapshots whether trace logging is enabled for this task attempt. */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  tracing = LOG.isTraceEnabled();
}
/** Records the trace-level logging state so the map loop can avoid repeated lookups. */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  tracing = LOG.isTraceEnabled();
}
/** Caches the logger's trace-enabled state at setup time. */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  tracing = LOG.isTraceEnabled();
}
/** Initializes the cached trace flag from the logger configuration. */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  tracing = LOG.isTraceEnabled();
}
/** Stores whether trace output is currently enabled for this task. */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  tracing = LOG.isTraceEnabled();
}
/**
 * Reads the filter-inversion flag from the job configuration; the field's
 * current value serves as the default when the key is absent.
 */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  invert = context.getConfiguration()
      .getBoolean(RdfMapReduceConstants.FILTER_INVERT, invert);
}
/**
 * Reads the maximum shingle (n-gram) size and the unigram-emission flag
 * from the job configuration and logs the effective values.
 */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  Configuration conf = context.getConfiguration();
  this.maxShingleSize = conf.getInt(MAX_SHINGLE_SIZE, DEFAULT_MAX_SHINGLE_SIZE);
  this.emitUnigrams = conf.getBoolean(CollocDriver.EMIT_UNIGRAMS, CollocDriver.DEFAULT_EMIT_UNIGRAMS);
  // Parameterized logging defers formatting until the level is enabled, so
  // the explicit isInfoEnabled() guard the original carried was redundant.
  log.info("Max Ngram size is {}", this.maxShingleSize);
  log.info("Emit Unigrams is {}", emitUnigrams); // fixed "Unitgrams" typo
}
/**
 * Creates the Lucene analyzer named in the configuration, defaulting to
 * {@code StandardAnalyzer} when no class is configured.
 */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  final String analyzerClass =
      context.getConfiguration()
          .get(DocumentProcessor.ANALYZER_CLASS, StandardAnalyzer.class.getName());
  try {
    analyzer = AnalyzerUtils.createAnalyzer(analyzerClass);
  } catch (ClassNotFoundException e) {
    // Wrap as IOException, preserving the cause, per the method signature.
    throw new IOException("Unable to create analyzer: " + analyzerClass, e);
  }
}
}
/** Logs which input split this mapper instance is processing. */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  // Useful when correlating task logs with specific split boundaries.
  log.info("Input split = {}", context.getInputSplit());
}
/**
 * Loads the prior clusters from the configured sequence-file path and
 * initializes the classification policy before delegating to the parent
 * setup (original ordering preserved).
 */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  final Configuration configuration = context.getConfiguration();
  final String priorPath = configuration.get(ClusterIterator.PRIOR_PATH_KEY);
  classifier = new ClusterClassifier();
  classifier.readFromSeqFiles(configuration, new Path(priorPath));
  policy = classifier.getPolicy();
  // Seed the policy with the freshly loaded classifier state.
  policy.update(classifier);
  super.setup(context);
}