static AbstractSerDe createDelegate(String className) { final Class<? extends AbstractSerDe> clazz; try { //noinspection unchecked clazz = (Class<? extends AbstractSerDe>) Class.forName(className); } catch (ClassNotFoundException e) { throw new RuntimeException(e); } // we are not setting conf thus null is okay return ReflectionUtil.newInstance(clazz, null); }
/**
 * Resolves a {@link HiveOutputFormat} for the given output format class, wrapping
 * non-Hive formats in a {@link HivePassThroughOutputFormat}.
 */
private static HiveOutputFormat<?, ?> getHiveOutputFormat(
    Configuration conf, Class<? extends OutputFormat> outputClass) throws HiveException {
  OutputFormat<?, ?> format = ReflectionUtil.newInstance(outputClass, conf);
  if (format instanceof HiveOutputFormat) {
    return (HiveOutputFormat<?, ?>) format;
  }
  // Adapt plain Hadoop OutputFormats to the Hive interface.
  return (HiveOutputFormat<?, ?>) new HivePassThroughOutputFormat(format);
}
/**
 * Resolves a {@link HiveOutputFormat} for the given output format class, wrapping
 * non-Hive formats in a {@link HivePassThroughOutputFormat}.
 */
private static HiveOutputFormat<?, ?> getHiveOutputFormat(
    Configuration conf, Class<? extends OutputFormat> outputClass) throws HiveException {
  OutputFormat<?, ?> format = ReflectionUtil.newInstance(outputClass, conf);
  if (format instanceof HiveOutputFormat) {
    return (HiveOutputFormat<?, ?>) format;
  }
  // Adapt plain Hadoop OutputFormats to the Hive interface.
  return (HiveOutputFormat<?, ?>) new HivePassThroughOutputFormat(format);
}
/**
 * Deserializes this wrapper: reads the concrete InputSplit class name, instantiates
 * it reflectively, delegates its own deserialization to it, then reads the
 * input format class name.
 */
@Override
public void readFields(DataInput in) throws IOException {
  final String splitClassName = in.readUTF();
  try {
    inputSplit = (InputSplit) ReflectionUtil.newInstance(
        conf.getClassByName(splitClassName), conf);
  } catch (Exception e) {
    throw new IOException("Cannot create an instance of InputSplit class = "
        + splitClassName + ":" + e.getMessage(), e);
  }
  // The split reads its own serialized state from the stream.
  inputSplit.readFields(in);
  inputFormatClassName = in.readUTF();
}
/**
 * Deserializes this wrapper. Wire format: split class name, split count,
 * N serialized splits, then the input format class name.
 */
@Override
public void readFields(DataInput in) throws IOException {
  String inputSplitClassName = in.readUTF();
  int numSplits = in.readInt();
  inputSplits = new InputSplit[numSplits];
  for (int i = 0; i < numSplits; i++) {
    try {
      inputSplits[i] = (InputSplit) ReflectionUtil.newInstance(conf
          .getClassByName(inputSplitClassName), conf);
    } catch (Exception e) {
      // Fix: preserve the original exception as the cause (it was previously
      // dropped), matching the single-split readFields implementation.
      throw new IOException("Cannot create an instance of InputSplit class = "
          + inputSplitClassName + ":" + e.getMessage(), e);
    }
    // Each split reads its own serialized state from the stream.
    inputSplits[i].readFields(in);
  }
  inputFormatClassName = in.readUTF();
}
/**
 * Check if the given serde is valid: the named class must be loadable, be a
 * {@link Deserializer}, and be instantiable with the given conf.
 *
 * @throws HiveException if the serde cannot be resolved or instantiated
 */
public static void validateSerDe(String serdeName, HiveConf conf) throws HiveException {
  try {
    Class<? extends Deserializer> serdeClass =
        conf.getClassByName(serdeName).asSubclass(Deserializer.class);
    Deserializer deserializer = ReflectionUtil.newInstance(serdeClass, conf);
    if (deserializer != null) {
      LOG.debug("Found class for {}", serdeName);
    }
  } catch (Exception e) {
    throw new HiveException("Cannot validate serde: " + serdeName, e);
  }
}
/**
 * Creates and initializes a deserializer for this descriptor.
 *
 * @param ignoreError if true, initialization errors are not checked
 */
public Deserializer getDeserializer(Configuration conf, boolean ignoreError) throws Exception {
  Class<? extends Deserializer> clazz = getDeserializerClass().asSubclass(Deserializer.class);
  Deserializer deserializer = ReflectionUtil.newInstance(clazz, conf);
  if (ignoreError) {
    SerDeUtils.initializeSerDeWithoutErrorCheck(deserializer, conf, properties, null);
  } else {
    SerDeUtils.initializeSerDe(deserializer, conf, properties, null);
  }
  return deserializer;
}
/**
 * Creates and initializes a deserializer for this descriptor.
 *
 * @param ignoreError if true, initialization errors are not checked
 */
public Deserializer getDeserializer(Configuration conf, boolean ignoreError) throws Exception {
  Class<? extends Deserializer> clazz = getDeserializerClass().asSubclass(Deserializer.class);
  Deserializer deserializer = ReflectionUtil.newInstance(clazz, conf);
  if (ignoreError) {
    SerDeUtils.initializeSerDeWithoutErrorCheck(deserializer, conf, properties, null);
  } else {
    SerDeUtils.initializeSerDe(deserializer, conf, properties, null);
  }
  return deserializer;
}
/** Registers a GenericUDF under the given name and returns its FunctionInfo. */
private FunctionInfo registerGenericUDF(String functionName, FunctionType functionType,
    Class<? extends GenericUDF> genericUDFClass, FunctionResource... resources) {
  validateClass(genericUDFClass, GenericUDF.class);
  // null conf: UDF instances do not take a configuration at construction.
  GenericUDF udf = ReflectionUtil.newInstance(genericUDFClass, null);
  FunctionInfo info = new FunctionInfo(functionType, functionName, udf, resources);
  addFunction(functionName, info);
  return info;
}
/** Registers a GenericUDTF under the given name and returns its FunctionInfo. */
private FunctionInfo registerGenericUDTF(String functionName, FunctionType functionType,
    Class<? extends GenericUDTF> genericUDTFClass, FunctionResource... resources) {
  validateClass(genericUDTFClass, GenericUDTF.class);
  // null conf: UDTF instances do not take a configuration at construction.
  GenericUDTF udtf = ReflectionUtil.newInstance(genericUDTFClass, null);
  FunctionInfo info = new FunctionInfo(functionType, functionName, udtf, resources);
  addFunction(functionName, info);
  return info;
}
/** Registers a GenericUDF under the given name and returns its FunctionInfo. */
private FunctionInfo registerGenericUDF(String functionName, FunctionType functionType,
    Class<? extends GenericUDF> genericUDFClass, FunctionResource... resources) {
  validateClass(genericUDFClass, GenericUDF.class);
  // null conf: UDF instances do not take a configuration at construction.
  GenericUDF udf = ReflectionUtil.newInstance(genericUDFClass, null);
  FunctionInfo info = new FunctionInfo(functionType, functionName, udf, resources);
  addFunction(functionName, info);
  return info;
}
public static String getFileExtension(JobConf conf, StorageFormat storageFormat) { // text format files must have the correct extension when compressed if (!HiveConf.getBoolVar(conf, COMPRESSRESULT) || !HiveIgnoreKeyTextOutputFormat.class.getName().equals(storageFormat.getOutputFormat())) { return ""; } String compressionCodecClass = conf.get("mapred.output.compression.codec"); if (compressionCodecClass == null) { return new DefaultCodec().getDefaultExtension(); } try { Class<? extends CompressionCodec> codecClass = conf.getClassByName(compressionCodecClass).asSubclass(CompressionCodec.class); return ReflectionUtil.newInstance(codecClass, conf).getDefaultExtension(); } catch (ClassNotFoundException e) { throw new PrestoException(HIVE_UNSUPPORTED_FORMAT, "Compression codec not found: " + compressionCodecClass, e); } catch (RuntimeException e) { throw new PrestoException(HIVE_UNSUPPORTED_FORMAT, "Failed to load compression codec: " + compressionCodecClass, e); } }
/** Registers a GenericUDTF under the given name and returns its FunctionInfo. */
private FunctionInfo registerGenericUDTF(String functionName, FunctionType functionType,
    Class<? extends GenericUDTF> genericUDTFClass, FunctionResource... resources) {
  validateClass(genericUDTFClass, GenericUDTF.class);
  // null conf: UDTF instances do not take a configuration at construction.
  GenericUDTF udtf = ReflectionUtil.newInstance(genericUDTFClass, null);
  FunctionInfo info = new FunctionInfo(functionType, functionName, udtf, resources);
  addFunction(functionName, info);
  return info;
}
/**
 * Return a deserializer object corresponding to the partitionDesc.
 */
public Deserializer getDeserializer(Configuration conf) throws Exception {
  Properties schema = getProperties();
  Class<? extends Deserializer> clazz =
      conf.getClassByName(getDeserializerClassName()).asSubclass(Deserializer.class);
  Deserializer deserializer = ReflectionUtil.newInstance(clazz, conf);
  // Table-level properties first; the partition schema overrides them.
  SerDeUtils.initializeSerDe(deserializer, conf, getTableDesc().getProperties(), schema);
  return deserializer;
}
/**
 * Return a deserializer object corresponding to the partitionDesc.
 */
public Deserializer getDeserializer(Configuration conf) throws Exception {
  Properties schema = getProperties();
  Class<? extends Deserializer> clazz =
      conf.getClassByName(getDeserializerClassName()).asSubclass(Deserializer.class);
  Deserializer deserializer = ReflectionUtil.newInstance(clazz, conf);
  // Table-level properties first; the partition schema overrides them.
  SerDeUtils.initializeSerDe(deserializer, conf, getTableDesc().getProperties(), schema);
  return deserializer;
}
/**
 * Builds and initializes the spill-table SerDe for the given alias.
 *
 * @return the initialized SerDe, or null if there is no spill table descriptor
 *         or the SerDe fails to initialize
 */
public static AbstractSerDe getSpillSerDe(byte alias, TableDesc[] spillTableDesc,
    JoinDesc conf, boolean noFilter) {
  TableDesc desc = getSpillTableDesc(alias, spillTableDesc, conf, noFilter);
  if (desc == null) {
    return null;
  }
  AbstractSerDe serde =
      (AbstractSerDe) ReflectionUtil.newInstance(desc.getDeserializerClass(), null);
  try {
    SerDeUtils.initializeSerDe(serde, null, desc.getProperties(), null);
    return serde;
  } catch (SerDeException e) {
    // NOTE(review): initialization failures are swallowed and surfaced only as a
    // null return; callers appear to depend on the null contract, but this should
    // log through a logger rather than printStackTrace.
    e.printStackTrace();
    return null;
  }
}
/**
 * Registers a legacy UDAF (bridged to GenericUDAF) under both its plain name and
 * its window-function name.
 */
private FunctionInfo registerUDAF(String functionName, FunctionType functionType,
    Class<? extends UDAF> udafClass, FunctionResource... resources) {
  validateClass(udafClass, UDAF.class);
  GenericUDAFBridge bridge = new GenericUDAFBridge(ReflectionUtil.newInstance(udafClass, null));
  FunctionInfo function =
      new WindowFunctionInfo(functionType, functionName, bridge, resources);
  addFunction(functionName, function);
  addFunction(WINDOW_FUNC_PREFIX + functionName, function);
  return function;
}
/**
 * Debug-level explain output for the key SerDe context; returns null if the key
 * SerDe cannot be initialized.
 */
@Explain(displayName = "keyContext", explainLevels = { Level.DEBUG })
public String getDebugKeyContext() {
  try {
    AbstractSerDe keySerde = (AbstractSerDe) ReflectionUtil.newInstance(
        keyTblDesc.getDeserializerClass(), null);
    SerDeUtils.initializeSerDe(keySerde, null, keyTblDesc.getProperties(), null);
    return new MapJoinObjectSerDeContext(keySerde, false).stringify();
  } catch (SerDeException e) {
    // Best-effort debug output: suppress and render nothing.
    return null;
  }
}
/**
 * Registers a legacy UDAF (bridged to GenericUDAF) under both its plain name and
 * its window-function name.
 */
private FunctionInfo registerUDAF(String functionName, FunctionType functionType,
    Class<? extends UDAF> udafClass, FunctionResource... resources) {
  validateClass(udafClass, UDAF.class);
  GenericUDAFBridge bridge = new GenericUDAFBridge(ReflectionUtil.newInstance(udafClass, null));
  FunctionInfo function =
      new WindowFunctionInfo(functionType, functionName, bridge, resources);
  addFunction(functionName, function);
  addFunction(WINDOW_FUNC_PREFIX + functionName, function);
  return function;
}
/**
 * Points the merge queue at the bucket files of {@code alias} that match the
 * current input path of the big table, using the configured BucketMatcher.
 */
private void setUpFetchContexts(String alias, MergeQueue mergeQueue) throws HiveException {
  // Reset any fetch state left over from a previous input path.
  mergeQueue.clearFetchContext();
  Path currentInputPath = getExecContext().getCurrentInputPath();
  BucketMapJoinContext bucketMatcherCxt = localWork.getBucketMapjoinContext();
  Class<? extends BucketMatcher> bucketMatcherCls = bucketMatcherCxt.getBucketMatcherClass();
  // Matcher is constructed with a null conf — presumably it needs none at
  // construction time; TODO confirm against BucketMatcher implementations.
  BucketMatcher bucketMatcher = ReflectionUtil.newInstance(bucketMatcherCls, null);
  // Derive and record the file id for the current input path.
  getExecContext().setFileId(bucketMatcherCxt.createFileId(currentInputPath.toString()));
  if (LOG.isInfoEnabled()) {
    LOG.info("set task id: " + getExecContext().getFileId());
  }
  bucketMatcher.setAliasBucketFileNameMapping(bucketMatcherCxt
      .getAliasBucketFileNameMapping());
  // Resolve the small-table bucket files corresponding to the big table's
  // current input file, then hand them to the merge queue.
  List<Path> aliasFiles = bucketMatcher.getAliasBucketFiles(currentInputPath.toString(),
      bucketMatcherCxt.getMapJoinBigTableAlias(), alias);
  mergeQueue.setupContext(aliasFiles);
}