private String getPartitionFilterString() {
  if (partitionFilterString == null) {
    Properties props = UDFContext.getUDFContext().getUDFProperties(
        this.getClass(), new String[]{signature});
    partitionFilterString = props.getProperty(PARTITION_FILTER);
  }
  return partitionFilterString;
}
protected void storeInUDFContext(String signature, String key, Object value) {
  UDFContext udfContext = UDFContext.getUDFContext();
  Properties props = udfContext.getUDFProperties(
      this.getClass(), new String[]{signature});
  props.put(key, value);
}
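The helpers above and below are halves of the usual front-end/back-end round trip. Here is a minimal, self-contained sketch of that pattern (the signature value and key are illustrative, not from the source): a value is stored in the UDF's signature-scoped Properties on the front end and read back on the back end once Pig has shipped the serialized UDFContext into the task.

import java.util.Properties;

import org.apache.pig.impl.util.UDFContext;

public class UdfContextRoundTrip {
  // Normally supplied by Pig via LoadFunc#setUDFContextSignature(String)
  private final String signature = "demo-signature";

  // Front end: stash a value in this UDF's signature-scoped Properties.
  public void store(String key, String value) {
    Properties props = UDFContext.getUDFContext()
        .getUDFProperties(getClass(), new String[]{signature});
    props.setProperty(key, value);
  }

  // Back end: the same Properties are recovered after deserialization,
  // keyed by the same (class, signature) pair.
  public String load(String key) {
    Properties props = UDFContext.getUDFContext()
        .getUDFProperties(getClass(), new String[]{signature});
    return props.getProperty(key);
  }
}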
private Properties getUDFProperties() {
  return UDFContext.getUDFContext().getUDFProperties(getClass(),
      new String[]{signature});
}
@Override
public void prepareToWrite(RecordWriter writer) throws IOException {
  this.writer = writer;
  computedSchema = (HCatSchema) ObjectSerializer.deserialize(
      UDFContext.getUDFContext().getUDFProperties(this.getClass(),
          new String[]{sign}).getProperty(COMPUTED_OUTPUT_SCHEMA));
}
HCatSchema getHCatSchema(List<RequiredField> fields, String signature,
    Class<?> classForUDFCLookup) throws IOException {
  if (fields == null) {
    return null;
  }
  Properties props = UDFContext.getUDFContext().getUDFProperties(
      classForUDFCLookup, new String[]{signature});
  HCatSchema hcatTableSchema = (HCatSchema) props.get(HCatConstants.HCAT_TABLE_SCHEMA);
  ArrayList<HCatFieldSchema> fcols = new ArrayList<HCatFieldSchema>();
  for (RequiredField rf : fields) {
    fcols.add(hcatTableSchema.getFields().get(rf.getIndex()));
  }
  return new HCatSchema(fcols);
}
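The pruning step in getHCatSchema can be read in isolation. A hedged sketch of the same idea (class and method names are mine, not from the source): given the full table schema and the column indexes that RequiredField#getIndex() supplies, build a new HCatSchema containing only those columns, in the requested order.

import java.util.ArrayList;
import java.util.List;

import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

public class SchemaPruner {
  // Keep only the columns whose indexes Pig pushed down, preserving order.
  public static HCatSchema prune(HCatSchema tableSchema, List<Integer> requiredIndexes) {
    ArrayList<HCatFieldSchema> cols = new ArrayList<HCatFieldSchema>();
    for (int index : requiredIndexes) {
      cols.add(tableSchema.getFields().get(index));
    }
    return new HCatSchema(cols);
  }
}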
/**
 * @param optString may be an empty string (but not null), in which case this is a no-op
 */
public HCatStorer(String partSpecs, String pigSchema, String optString) throws Exception {
  super(partSpecs, pigSchema);
  String[] optsArr = optString.split(" ");
  CommandLine configuredOptions;
  try {
    configuredOptions = parser.parse(validOptions, optsArr);
  } catch (ParseException e) {
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("[-" + ON_OOR_VALUE_OPT + "]", validOptions);
    throw e;
  }
  Properties udfProps = UDFContext.getUDFContext().getUDFProperties(
      this.getClass(), new String[]{sign});
  // downstream code expects it to be set to a valid value
  udfProps.put(ON_OORA_VALUE_PROP,
      configuredOptions.getOptionValue(ON_OOR_VALUE_OPT, getDefaultValue().name()));
  if (LOG.isDebugEnabled()) {
    LOG.debug("setting " + configuredOptions.getOptionValue(ON_OOR_VALUE_OPT));
  }
  isValidOOROption((String) udfProps.get(ON_OORA_VALUE_PROP));
}

public HCatStorer(String partSpecs, String pigSchema) throws Exception {
public HCatBaseStorer(String partSpecs, String schema) throws Exception {
  partitionKeys = new ArrayList<String>();
  partitions = new HashMap<String, String>();
  if (partSpecs != null && !partSpecs.trim().isEmpty()) {
    String[] partKVPs = partSpecs.split(",");
    for (String partKVP : partKVPs) {
      String[] partKV = partKVP.split("=");
      if (partKV.length == 2) {
        String partKey = partKV[0].trim();
        partitionKeys.add(partKey);
        partitions.put(partKey, partKV[1].trim());
      } else {
        throw new FrontendException("Invalid partition column specification. " + partSpecs,
            PigHCatUtil.PIG_EXCEPTION_CODE);
      }
    }
  }
  if (schema != null && !schema.trim().isEmpty()) {
    pigSchema = Utils.getSchemaFromString(schema);
  }
  Properties udfProps = UDFContext.getUDFContext().getUDFProperties(
      this.getClass(), new String[]{sign});
  onOutOfRange = OOR_VALUE_OPT_VALUES.valueOf(
      udfProps.getProperty(ON_OORA_VALUE_PROP, getDefaultValue().name()));
}

static OOR_VALUE_OPT_VALUES getDefaultValue() {
@Override
public void checkSchema(ResourceSchema resourceSchema) throws IOException {
  /* The schema provided by the user and the schema computed by Pig
   * at the time of calling store must match. */
  Schema runtimeSchema = Schema.getPigSchema(resourceSchema);
  if (pigSchema != null) {
    if (!Schema.equals(runtimeSchema, pigSchema, false, true)) {
      throw new FrontendException("Schema provided in store statement doesn't match the schema "
          + "returned by the Pig runtime. Schema provided in HCatStorer: " + pigSchema.toString()
          + " Schema received from Pig runtime: " + runtimeSchema.toString(),
          PigHCatUtil.PIG_EXCEPTION_CODE);
    }
  } else {
    pigSchema = runtimeSchema;
  }
  UDFContext.getUDFContext().getUDFProperties(this.getClass(), new String[]{sign})
      .setProperty(PIG_SCHEMA, ObjectSerializer.serialize(pigSchema));
}
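checkSchema above and prepareToWrite earlier form a pair: UDF properties survive (de)serialization as Strings, so non-String state such as a schema is stringified with ObjectSerializer on the way in and cast back on the way out. A minimal sketch of that round trip (the class name, key, and payload type are illustrative, not from the source):

import java.io.IOException;
import java.util.ArrayList;
import java.util.Properties;

import org.apache.pig.impl.util.ObjectSerializer;
import org.apache.pig.impl.util.UDFContext;

public class SerializedUdfState {
  private final String signature = "demo-signature";          // illustrative
  private static final String KEY = "demo.serialized.state";  // illustrative

  // Store any Serializable value as a String property.
  public void save(ArrayList<String> value) throws IOException {
    Properties props = UDFContext.getUDFContext()
        .getUDFProperties(getClass(), new String[]{signature});
    props.setProperty(KEY, ObjectSerializer.serialize(value));
  }

  // Deserialize and cast it back, as prepareToWrite() does with HCatSchema.
  @SuppressWarnings("unchecked")
  public ArrayList<String> load() throws IOException {
    Properties props = UDFContext.getUDFContext()
        .getUDFProperties(getClass(), new String[]{signature});
    return (ArrayList<String>) ObjectSerializer.deserialize(props.getProperty(KEY));
  }
}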
    .setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true);
UDFContext udfContext = UDFContext.getUDFContext();
Properties udfProps = udfContext.getUDFProperties(this.getClass(),
    new String[]{signature});
Configuration config = job.getConfiguration();
config.set(INNER_SIGNATURE, INNER_SIGNATURE_PREFIX + "_" + sign);
Properties udfProps = UDFContext.getUDFContext().getUDFProperties(
    this.getClass(), new String[]{sign});
String[] userStr = location.split("\\.");
Properties props = UDFContext.getUDFContext().getClientSystemProps();
String innerTupleName = HCatConstants.HCAT_PIG_INNER_TUPLE_NAME_DEFAULT;
if (props != null && props.containsKey(HCatConstants.HCAT_PIG_INNER_TUPLE_NAME)) {
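getClientSystemProps() differs from getUDFProperties(): it exposes client-side system properties and, judging from the null check in the snippet above, may be unavailable in some contexts. A compact sketch of the same lookup-with-fallback pattern (class and method names are mine):

import java.util.Properties;

import org.apache.pig.impl.util.UDFContext;

public class ClientPropsLookup {
  // Return the client system property if present, else the caller's default.
  public static String lookup(String key, String defaultValue) {
    Properties props = UDFContext.getUDFContext().getClientSystemProps();
    if (props != null && props.containsKey(key)) {
      return props.getProperty(key);
    }
    return defaultValue;
  }
}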
UDFContext udfContext = UDFContext.getUDFContext();
Properties udfProps = udfContext.getUDFProperties(this.getClass(),
    new String[]{signature});
private void storeInUDFContext(final String signature, final String key, final String value) {
  final UDFContext udfContext = UDFContext.getUDFContext();
  final Properties props = udfContext.getUDFProperties(this.getClass(),
      new String[]{signature});
  props.put(key, value);
}
/**
 * Returns UDFProperties based on <code>contextSignature</code>.
 */
private Properties getUDFProperties() {
  return UDFContext.getUDFContext().getUDFProperties(this.getClass(),
      new String[]{contextSignature});
}
private String getValueFromUDFContext(final String signature, final String key) {
  final UDFContext udfContext = UDFContext.getUDFContext();
  final Properties props = udfContext.getUDFProperties(this.getClass(),
      new String[]{signature});
  return props.getProperty(key);
}
private void initConnection() throws IOException {
  // Create the configuration used to make Phoenix connections
  UDFContext context = UDFContext.getUDFContext();
  configuration = new Configuration(context.getJobConf());
  configuration.set(HConstants.ZOOKEEPER_QUORUM, this.zkQuorum);
  if (Strings.isNullOrEmpty(tenantId)) {
    configuration.unset(PhoenixRuntime.TENANT_ID_ATTRIB);
  } else {
    configuration.set(PhoenixRuntime.TENANT_ID_ATTRIB, tenantId);
  }
  try {
    connection = ConnectionUtil.getOutputConnection(configuration);
  } catch (SQLException e) {
    throw new IOException("Caught exception while creating connection", e);
  }
}
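initConnection() opens a JDBC connection, so the storer must eventually close it. A hedged sketch (not from the source) of the matching cleanup, mirroring the open path's SQLException-to-IOException translation:

import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;

public class ConnectionCleanup {
  private Connection connection; // as returned by ConnectionUtil.getOutputConnection

  public void closeConnection() throws IOException {
    if (connection == null) {
      return;
    }
    try {
      connection.close();
    } catch (SQLException e) {
      throw new IOException("Caught exception while closing connection", e);
    } finally {
      connection = null;
    }
  }
}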
private void createUdfContext() {
  udfContext = UDFContext.getUDFContext();
  udfContext.addJobConf(conf);
}
public static void resetUDFContextForThreadReuse() {
  // On the Tez AM, MROutput OutputCommitters are initialized and setupJob
  // is called on them in a loop in the same thread.
  // commitJob/abortJob can be called from any thread, based on events received from vertices.
  // On Tez tasks, different inputs/outputs are initialized in different Initializer threads
  // by submitting them to a thread pool. Even though threadpoolsize = numInputs + numOutputs,
  // a thread can be reused.
  // Since the deserialized UDFContext from an input or output payload contains
  // information only for that input or output (to reduce payload sizes), we need to
  // ensure it is deserialized every time before use in a thread, to get the right one.
  UDFContext.getUDFContext().reset();
}
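The reset only clears thread-local state; per the comments above, the caller is then expected to re-attach the right payload and deserialize it before use. A sketch of that intended call order (assuming, as in the other snippets, that the Configuration carries the serialized UDFContext):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.pig.impl.util.UDFContext;

public class ThreadReuseInit {
  public static void initializeForThread(Configuration conf) throws IOException {
    UDFContext.getUDFContext().reset();          // same effect as resetUDFContextForThreadReuse()
    UDFContext.getUDFContext().addJobConf(conf); // attach this input/output's conf
    UDFContext.getUDFContext().deserialize();    // repopulate UDF properties from the conf
  }
}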
private void init() {
  // The decorator's work happens mainly on the backend, so no error
  // handler is created on the frontend
  if (UDFContext.getUDFContext().isFrontend()) {
    return;
  }
  if (storer instanceof ErrorHandling && allowErrors()) {
    errorHandler = ((ErrorHandling) storer).getErrorHandler();
    shouldHandleErrors = true;
  }
}