public FosterStorageHandler(String ifName, String ofName, String serdeName)
    throws ClassNotFoundException {
  this((Class<? extends InputFormat>) JavaUtils.loadClass(ifName),
      (Class<? extends OutputFormat>) JavaUtils.loadClass(ofName),
      (Class<? extends AbstractSerDe>) JavaUtils.loadClass(serdeName));
}
public static Class loadClass(String shadePrefix, String className) throws ClassNotFoundException {
  return loadClass(shadePrefix + "." + className);
}
public static Class loadClass(String className) throws ClassNotFoundException {
  return loadClass(className, true);
}
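Together, these two overloads let a caller try a relocated (shaded) class name before falling back to the plain one. A minimal hedged sketch of such a caller; the helper class and the fallback logic are hypothetical, not part of JavaUtils:

import org.apache.hadoop.hive.common.JavaUtils;

public final class ShadedClassResolver {
  // Hypothetical helper: prefer the shaded relocation, fall back to the
  // original name. JavaUtils.loadClass(prefix, name) resolves prefix + "." + name.
  public static Class<?> resolve(String shadePrefix, String className)
      throws ClassNotFoundException {
    try {
      return JavaUtils.loadClass(shadePrefix, className);
    } catch (ClassNotFoundException e) {
      return JavaUtils.loadClass(className);
    }
  }
}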
private static MessageFactory getInstance(String className) {
  try {
    return (MessageFactory) ReflectionUtils.newInstance(JavaUtils.loadClass(className), hiveConf);
  } catch (ClassNotFoundException classNotFound) {
    throw new IllegalStateException("Could not construct MessageFactory implementation: ", classNotFound);
  }
}
private static Class<?> loadClass(String className, @Nullable Configuration configuration)
    throws Exception {
  if (configuration != null) {
    return configuration.getClassByName(className);
  }
  return JavaUtils.loadClass(className);
}
@SuppressWarnings("unchecked") private AcidOutputFormat<?, ?> createOutputFormat(String outputFormatName, HiveConf configuration) throws WorkerException { try { return (AcidOutputFormat<?, ?>) ReflectionUtils.newInstance(JavaUtils.loadClass(outputFormatName), configuration); } catch (ClassNotFoundException e) { throw new WorkerException("Could not locate class for '" + outputFormatName + "'.", e); } }
private TableFunctionResolver constructResolver(String className) throws HiveException {
  try {
    @SuppressWarnings("unchecked")
    Class<? extends TableFunctionResolver> rCls =
        (Class<? extends TableFunctionResolver>) JavaUtils.loadClass(className);
    return ReflectionUtils.newInstance(rCls, null);
  } catch (Exception e) {
    throw new HiveException(e);
  }
}
private MapJoinPersistableTableContainer create(
    String name, Map<String, String> metaData) throws HiveException {
  try {
    @SuppressWarnings("unchecked")
    Class<? extends MapJoinPersistableTableContainer> clazz =
        (Class<? extends MapJoinPersistableTableContainer>) JavaUtils.loadClass(name);
    Constructor<? extends MapJoinPersistableTableContainer> constructor =
        clazz.getDeclaredConstructor(Map.class);
    return constructor.newInstance(metaData);
  } catch (Exception e) {
    String msg = "Error while attempting to create table container" +
        " of type: " + name + ", with metaData: " + metaData;
    throw new HiveException(msg, e);
  }
}
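The resolver and container factories above all repeat one pattern: load a class by name, narrow it to an expected supertype, and instantiate it reflectively, wrapping ClassNotFoundException in a domain exception. A hedged generic sketch of that pattern; the helper itself is hypothetical:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.util.ReflectionUtils;

public final class ReflectiveFactories {
  // Hypothetical helper distilling the pattern above: asSubclass() replaces
  // the unchecked cast and fails fast with ClassCastException on a wrong type,
  // while ReflectionUtils.newInstance() injects conf into Configurable instances.
  public static <T> T newInstance(String className, Class<T> expected, Configuration conf)
      throws ClassNotFoundException {
    Class<? extends T> clazz = JavaUtils.loadClass(className).asSubclass(expected);
    return ReflectionUtils.newInstance(clazz, conf);
  }
}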
/**
 * Get an instance of the selected storage class. Defaults to
 * HDFS storage if none is specified.
 */
public static TempletonStorage getStorageInstance(Configuration conf) {
  TempletonStorage storage = null;
  try {
    storage = (TempletonStorage)
        JavaUtils.loadClass(conf.get(TempletonStorage.STORAGE_CLASS)).newInstance();
  } catch (Exception e) {
    LOG.warn("No storage method found: " + e.getMessage());
    try {
      storage = new HDFSStorage();
    } catch (Exception ex) {
      LOG.error("Couldn't create storage.");
    }
  }
  return storage;
}
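A hedged usage sketch for the factory above; the ZooKeeper-backed implementation named here is illustrative and would need to be on the classpath:

// Select a storage implementation explicitly; if loading or instantiation
// fails, getStorageInstance() falls back to HDFSStorage.
Configuration conf = new Configuration();
conf.set(TempletonStorage.STORAGE_CLASS,
    "org.apache.hive.hcatalog.templeton.tool.ZooKeeperStorage");
TempletonStorage storage = getStorageInstance(conf);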
try {
  log.debug("Looking for: {}", hadoopJarFinder);
  jarFinder = JavaUtils.loadClass(hadoopJarFinder);
  log.debug("Found: {}", hadoopJarFinder);
  Method getJar = jarFinder.getMethod("getJar", Class.class);
private static <T> T instantiate(Class<T> classType, String classname) throws IOException {
  T t = null;
  try {
    Class c = JavaUtils.loadClass(classname);
    Object o = c.newInstance();
    if (classType.isAssignableFrom(o.getClass())) {
      t = (T) o;
    } else {
      String s = classname + " is not an instance of " + classType.getName();
      LOG.error(s);
      throw new IOException(s);
    }
  } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
    LOG.error("Unable to instantiate class, " + StringUtils.stringifyException(e));
    throw new IOException(e);
  }
  return t;
}
public static CompressionCodec createCodec(CompressionKind kind) {
  switch (kind) {
    case NONE:
      return null;
    case ZLIB:
      return new ZlibCodec();
    case SNAPPY:
      return new SnappyCodec();
    case LZO:
      try {
        Class<? extends CompressionCodec> lzo = (Class<? extends CompressionCodec>)
            JavaUtils.loadClass("org.apache.hadoop.hive.ql.io.orc.LzoCodec");
        return lzo.newInstance();
      } catch (ClassNotFoundException e) {
        throw new IllegalArgumentException("LZO is not available.", e);
      } catch (InstantiationException e) {
        throw new IllegalArgumentException("Problem initializing LZO", e);
      } catch (IllegalAccessException e) {
        throw new IllegalArgumentException("Insufficient access to LZO", e);
      }
    default:
      throw new IllegalArgumentException("Unknown compression codec: " + kind);
  }
}
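A hedged call-site sketch; note that the NONE case returns null rather than an identity codec, so callers must branch:

// Illustrative only: resolve the codec once per stream.
CompressionCodec codec = createCodec(CompressionKind.ZLIB);
if (codec == null) {
  // CompressionKind.NONE: write the stream uncompressed
}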
@SuppressWarnings("unchecked") @Override public void readFields(DataInput input) throws IOException { String partitionInfoString = WritableUtils.readString(input); partitionInfo = (PartInfo) HCatUtil.deserialize(partitionInfoString); String baseSplitClassName = WritableUtils.readString(input); org.apache.hadoop.mapred.InputSplit split; try { Class<? extends org.apache.hadoop.mapred.InputSplit> splitClass = (Class<? extends org.apache.hadoop.mapred.InputSplit>) JavaUtils.loadClass(baseSplitClassName); //Class.forName().newInstance() does not work if the underlying //InputSplit has package visibility Constructor<? extends org.apache.hadoop.mapred.InputSplit> constructor = splitClass.getDeclaredConstructor(new Class[]{}); constructor.setAccessible(true); split = constructor.newInstance(); // read baseSplit from input ((Writable) split).readFields(input); this.baseMapRedSplit = split; } catch (Exception e) { throw new IOException("Exception from " + baseSplitClassName, e); } }
/**
 * Return the types for the composite key.
 *
 * @param tbl Properties for the table
 * @return a map from composite key part names to their types
 * @throws SerDeException if something goes wrong while getting the composite key parts
 */
@SuppressWarnings("unchecked")
private static Map<String, String> getCompositeKeyParts(Properties tbl) throws SerDeException {
  String compKeyClassName = tbl.getProperty(HBaseSerDe.HBASE_COMPOSITE_KEY_CLASS);
  if (compKeyClassName == null) {
    // no custom composite key class provided; return null
    return null;
  }
  CompositeHBaseKeyFactory<HBaseCompositeKey> keyFactory = null;
  Class<?> keyClass;
  try {
    keyClass = JavaUtils.loadClass(compKeyClassName);
    keyFactory = new CompositeHBaseKeyFactory(keyClass);
  } catch (Exception e) {
    throw new SerDeException(e);
  }
  HBaseCompositeKey compKey = keyFactory.createKey(null);
  return compKey.getParts();
}
private Class<?> getInputFormat(JobConf jobConf, MapWork mWork) throws HiveException {
  // MergeFileWork is a sub-class of MapWork; we don't need to distinguish here
  if (mWork.getInputformat() != null) {
    HiveConf.setVar(jobConf, HiveConf.ConfVars.HIVEINPUTFORMAT, mWork.getInputformat());
  }
  String inpFormat = HiveConf.getVar(jobConf, HiveConf.ConfVars.HIVEINPUTFORMAT);
  if (mWork.isUseBucketizedHiveInputFormat()) {
    inpFormat = BucketizedHiveInputFormat.class.getName();
  }
  Class inputFormatClass;
  try {
    inputFormatClass = JavaUtils.loadClass(inpFormat);
  } catch (ClassNotFoundException e) {
    String message = "Failed to load specified input format class: " + inpFormat;
    LOG.error(message, e);
    throw new HiveException(message, e);
  }
  return inputFormatClass;
}
/**
 * Auto-generate the Avro struct from a class.
 *
 * @param serClassName serialization class for the Avro struct
 * @param sb StringBuilder to hold the generated struct
 * @throws SerDeException if something goes wrong while generating the struct
 */
private static void generateAvroStructFromClass(String serClassName, StringBuilder sb)
    throws SerDeException {
  Class<?> serClass;
  try {
    serClass = JavaUtils.loadClass(serClassName);
  } catch (ClassNotFoundException e) {
    throw new SerDeException("Error obtaining descriptor for " + serClassName, e);
  }
  Schema schema = ReflectData.get().getSchema(serClass);
  generateAvroStructFromSchema(schema, sb);
}
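For intuition, a hedged example of what ReflectData produces; the bean is hypothetical and the schema shown is approximate:

// Hypothetical bean...
public class Point {
  int x;
  int y;
}
// ...for which ReflectData.get().getSchema(Point.class) yields, roughly:
// {"type":"record","name":"Point","fields":[
//   {"name":"x","type":"int"},{"name":"y","type":"int"}]}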
public static RecordWriter getHiveRecordWriter(JobConf jc, TableDesc tableInfo,
    Class<? extends Writable> outputClass, FileSinkDesc conf, Path outPath,
    Reporter reporter) throws HiveException {
  HiveOutputFormat<?, ?> hiveOutputFormat = getHiveOutputFormat(jc, tableInfo);
  try {
    boolean isCompressed = conf.getCompressed();
    JobConf jc_output = jc;
    if (isCompressed) {
      jc_output = new JobConf(jc);
      String codecStr = conf.getCompressCodec();
      if (codecStr != null && !codecStr.trim().equals("")) {
        Class<? extends CompressionCodec> codec =
            (Class<? extends CompressionCodec>) JavaUtils.loadClass(codecStr);
        FileOutputFormat.setOutputCompressorClass(jc_output, codec);
      }
      String type = conf.getCompressType();
      if (type != null && !type.trim().equals("")) {
        CompressionType style = CompressionType.valueOf(type);
        SequenceFileOutputFormat.setOutputCompressionType(jc, style);
      }
    }
    return getRecordWriter(jc_output, hiveOutputFormat, outputClass, isCompressed,
        tableInfo.getProperties(), outPath, reporter);
  } catch (Exception e) {
    throw new HiveException(e);
  }
}