/**
 * Builds a descriptor from explicit input/output formats.
 *
 * @param inputFormatClass format used to read the table's data
 * @param outputFormatClass candidate output format; the actual class stored is the
 *     substitute resolved by {@link HiveFileFormatUtils#getOutputFormatSubstitute}
 * @param properties must contain the serde class name associated with this table
 */
public TableDesc(final Class<? extends InputFormat> inputFormatClass,
    final Class<?> outputFormatClass, final Properties properties) {
  this.inputFileFormatClass = inputFormatClass;
  this.outputFileFormatClass =
      HiveFileFormatUtils.getOutputFormatSubstitute(outputFormatClass);
  setProperties(properties);
}
/**
 * Builds a descriptor from explicit input/output formats.
 *
 * @param inputFormatClass format used to read the table's data
 * @param outputFormatClass candidate output format; the actual class stored is the
 *     substitute resolved by {@link HiveFileFormatUtils#getOutputFormatSubstitute}
 * @param properties must contain the serde class name associated with this table
 */
public TableDesc(final Class<? extends InputFormat> inputFormatClass,
    final Class<?> outputFormatClass, final Properties properties) {
  this.inputFileFormatClass = inputFormatClass;
  this.outputFileFormatClass =
      HiveFileFormatUtils.getOutputFormatSubstitute(outputFormatClass);
  setProperties(properties);
}
/**
 * Returns a copy of this descriptor with its own {@link Properties} instance and,
 * when present, its own job-properties map, so mutations on the copy do not leak
 * back into this object. Format classes are shared (Class objects are immutable).
 */
@Override
public Object clone() {
  TableDesc copy = new TableDesc();
  copy.setInputFileFormatClass(inputFileFormatClass);
  copy.setOutputFileFormatClass(outputFileFormatClass);

  // Copy entry-by-entry; like the enumeration-based copy this does not chain defaults.
  Properties copiedProps = new Properties();
  for (Object key : properties.keySet()) {
    copiedProps.put(key, properties.get(key));
  }
  copy.setProperties(copiedProps);

  if (jobProperties != null) {
    copy.jobProperties = new LinkedHashMap<String, String>(jobProperties);
  }
  return copy;
}
/**
 * Returns a copy of this descriptor with its own {@link Properties} instance and,
 * when present, its own job-properties map, so mutations on the copy do not leak
 * back into this object. Format classes are shared (Class objects are immutable).
 */
@Override
public Object clone() {
  TableDesc copy = new TableDesc();
  copy.setInputFileFormatClass(inputFileFormatClass);
  copy.setOutputFileFormatClass(outputFileFormatClass);

  // Copy entry-by-entry; like the enumeration-based copy this does not chain defaults.
  Properties copiedProps = new Properties();
  for (Object key : properties.keySet()) {
    copiedProps.put(key, properties.get(key));
  }
  copy.setProperties(copiedProps);

  if (jobProperties != null) {
    copy.jobProperties = new LinkedHashMap<String, String>(jobProperties);
  }
  return copy;
}
/**
 * Resolves the {@link InputFormat} class for a table or partition.
 *
 * <p>Resolution order: the explicit input-format name on the {@link StorageDescriptor}
 * first; otherwise the table's {@code META_TABLE_STORAGE} storage-handler class, whose
 * input job properties are configured on a throwaway {@link TableDesc} before the
 * handler's input format is returned.
 *
 * @param job {@link JobConf} instance, needed in case the table is storage-handler based
 * @param sd {@link StorageDescriptor} of the partition being read, or of the table
 *     itself for non-partitioned tables
 * @param table the Hive table object
 * @throws ExecutionSetupException when neither an explicit input format nor a
 *     storage-handler class is available
 * @throws Exception on reflection or storage-handler instantiation failures
 */
public static Class<? extends InputFormat<?, ?>> getInputFormatClass(final JobConf job,
    final StorageDescriptor sd, final Table table) throws Exception {
  final String inputFormatName = sd.getInputFormat();
  if (!Strings.isNullOrEmpty(inputFormatName)) {
    // Fast path: the descriptor names the class directly.
    return (Class<? extends InputFormat<?, ?>>) Class.forName(inputFormatName);
  }

  final String storageHandlerClass = table.getParameters().get(META_TABLE_STORAGE);
  if (Strings.isNullOrEmpty(storageHandlerClass)) {
    throw new ExecutionSetupException("Unable to get Hive table InputFormat class. There is neither " +
        "InputFormat class explicitly specified nor StorageHandler class");
  }

  final HiveStorageHandler storageHandler = HiveUtils.getStorageHandler(job, storageHandlerClass);
  final TableDesc tableDesc = new TableDesc();
  tableDesc.setProperties(MetaStoreUtils.getTableMetadata(table));
  // Let the handler install whatever job properties its input format relies on.
  storageHandler.configureInputJobProperties(tableDesc, table.getParameters());
  return (Class<? extends InputFormat<?, ?>>) storageHandler.getInputFormatClass();
}
// Give the reduce sink a bare configuration so key-serialization info can be attached.
rs.setConf(new ReduceSinkDesc());
// Empty TableDesc/Properties as the key serialize info — NOTE(review): looks like
// minimal test/fixture wiring rather than production planning; confirm against the
// enclosing method, which is not visible in this chunk.
TableDesc tableDesc = new TableDesc();
tableDesc.setProperties(new Properties());
rs.getConf().setKeySerializeInfo(tableDesc);
ts = new TableScanOperator(cCtx);
/**
 * Returns a copy of this descriptor with its own {@link Properties} instance and,
 * when present, its own job-properties map, so mutations on the copy do not leak
 * back into this object. Format classes are shared (Class objects are immutable).
 */
@Override
public Object clone() {
  TableDesc copy = new TableDesc();
  copy.setInputFileFormatClass(inputFileFormatClass);
  copy.setOutputFileFormatClass(outputFileFormatClass);

  // Copy entry-by-entry; like the enumeration-based copy this does not chain defaults.
  Properties copiedProps = new Properties();
  for (Object key : properties.keySet()) {
    copiedProps.put(key, properties.get(key));
  }
  copy.setProperties(copiedProps);

  if (jobProperties != null) {
    copy.jobProperties = new LinkedHashMap<String, String>(jobProperties);
  }
  return copy;
}
/**
 * Returns a copy of this descriptor: serde name, deserializer, and format classes are
 * carried over, the {@link Properties} are copied into a fresh instance, and the
 * job-properties map (when present) is duplicated so the copy is independent.
 */
@Override
public Object clone() {
  TableDesc copy = new TableDesc();
  copy.setSerdeClassName(serdeClassName);
  copy.setDeserializerClass(deserializerClass);
  copy.setInputFileFormatClass(inputFileFormatClass);
  copy.setOutputFileFormatClass(outputFileFormatClass);

  // Copy entry-by-entry; like the enumeration-based copy this does not chain defaults.
  Properties copiedProps = new Properties();
  for (Object key : properties.keySet()) {
    copiedProps.put(key, properties.get(key));
  }
  copy.setProperties(copiedProps);

  if (jobProperties != null) {
    copy.jobProperties = new LinkedHashMap<String, String>(jobProperties);
  }
  return copy;
}
}
// Lazily initialize the descriptor's properties so subsequent puts on `target`
// never hit null; the assignment-inside-call stores the same instance both in
// `target` and on the descriptor. (Fragment: the if-body continues past this chunk.)
Properties target = tblDesc.getProperties();
if (target == null) {
  tblDesc.setProperties(target = new Properties());