/**
 * Returns the output file format class, lazily resolving it from the
 * table descriptor on first access when it has not been set explicitly.
 */
public Class<? extends OutputFormat> getOutputFileFormatClass() {
  // Guard clause: nothing to resolve, or no descriptor to resolve from.
  if (outputFileFormatClass != null || tableDesc == null) {
    return outputFileFormatClass;
  }
  setOutputFileFormatClass(tableDesc.getOutputFileFormatClass());
  return outputFileFormatClass;
}
// Convenience overload: resolves the output file format class from the table
// descriptor and delegates to the class-based getHiveOutputFormat overload.
public static HiveOutputFormat<?, ?> getHiveOutputFormat(Configuration conf, TableDesc tableDesc) throws HiveException { return getHiveOutputFormat(conf, tableDesc.getOutputFileFormatClass()); }
/**
 * @return the fully-qualified name of the output file format class,
 *         surfaced in EXPLAIN output under "output format".
 */
@Explain(displayName = "output format")
public String getOutputFileFormatClassName() {
  final Class<?> formatClass = getOutputFileFormatClass();
  return formatClass.getName();
}
/**
 * Returns the output file format class. If it has not been assigned yet and a
 * table descriptor is available, the class is resolved from the descriptor
 * and cached via the setter before being returned.
 */
public Class<? extends OutputFormat> getOutputFileFormatClass() {
  final boolean needsResolve = outputFileFormatClass == null && tableDesc != null;
  if (needsResolve) {
    setOutputFileFormatClass(tableDesc.getOutputFileFormatClass());
  }
  return outputFileFormatClass;
}
/**
 * Reports the output file format as its fully-qualified class name;
 * shown by EXPLAIN as "output format".
 */
@Explain(displayName = "output format")
public String getOutputFileFormatClassName() {
  final Class<?> clazz = getOutputFileFormatClass();
  return clazz.getName();
}
// Convenience overload: looks up the output file format class on the table
// descriptor and forwards to the overload that takes the class directly.
public static HiveOutputFormat<?, ?> getHiveOutputFormat(Configuration conf, TableDesc tableDesc) throws HiveException { return getHiveOutputFormat(conf, tableDesc.getOutputFileFormatClass()); }
// NOTE(review): fragment — continues a conditional expression begun on a line
// outside this view. Chooses the ACID operation for the sink (falling back to
// NOT_ACID), then records the parent ReduceSinkOperator that was added to
// enforce bucketing/sorting, and flags multi-file spray on the context.
// Presumably part of SemanticAnalyzer file-sink setup — confirm in full source.
getAcidType(table_desc.getOutputFileFormatClass(), dest) : AcidUtils.Operation.NOT_ACID)); reduceSinkOperatorsAddedByEnforceBucketingSorting.add((ReduceSinkOperator)input.getParentOperators().get(0)); ctx.setMultiFileSpray(multiFileSpray);
// NOTE(review): fragment — the if-body is not closed within this view.
// Defaults to NOT_ACID; only full-ACID destination tables get a real ACID
// operation, derived from the table's output format class and the dest clause.
AcidUtils.Operation acidOp = Operation.NOT_ACID; if (AcidUtils.isFullAcidTable(dest_tab)) { acidOp = getAcidType(Utilities.getTableDesc(dest_tab).getOutputFileFormatClass(), dest);
// NOTE(review): fragment — opens mid-call and the if-body is not closed here.
// skipFiles remains true only when the table's output format does NOT
// implement StreamingOutputFormat (i.e. streaming-capable formats may not
// skip file creation — confirm intent in full source).
HiveConf.getVar(hconf, ConfVars.HIVE_EXECUTION_ENGINE)); if (skipFiles) { Class<?> clazz = conf.getTableInfo().getOutputFileFormatClass(); skipFiles = !StreamingOutputFormat.class.isAssignableFrom(clazz);
// NOTE(review): fragment (verbatim duplicate of the preceding snippet).
// Clears skipFiles unless the configured output format is assignable to
// StreamingOutputFormat; streaming formats keep skipFiles false — TODO confirm.
HiveConf.getVar(hconf, ConfVars.HIVE_EXECUTION_ENGINE)); if (skipFiles) { Class<?> clazz = conf.getTableInfo().getOutputFileFormatClass(); skipFiles = !StreamingOutputFormat.class.isAssignableFrom(clazz);
// NOTE(review): this line contains the same snippet TWICE verbatim — almost
// certainly a copy/paste or extraction artifact; verify against the original
// file before relying on it. Each copy: defaults the ACID operation to
// NOT_ACID, and for ACID destination tables derives the real operation from
// the table's output format class and enforces ACID constraints.
AcidUtils.Operation acidOp = AcidUtils.Operation.NOT_ACID; if (destTableIsAcid) { acidOp = getAcidType(table_desc.getOutputFileFormatClass(), dest); checkAcidConstraints(qb, table_desc, dest_tab); AcidUtils.Operation acidOp = AcidUtils.Operation.NOT_ACID; if (destTableIsAcid) { acidOp = getAcidType(table_desc.getOutputFileFormatClass(), dest); checkAcidConstraints(qb, table_desc, dest_tab);
// NOTE(review): this line repeats the same snippet THREE times verbatim —
// extraction artifact; verify against the original file. Each copy: defaults
// to NOT_ACID and, for full-ACID destination tables, derives the ACID
// operation from the table descriptor's output format class and dest clause.
AcidUtils.Operation acidOp = AcidUtils.Operation.NOT_ACID; if (destTableIsFullAcid) { acidOp = getAcidType(tableDescriptor.getOutputFileFormatClass(), dest); AcidUtils.Operation acidOp = AcidUtils.Operation.NOT_ACID; if (destTableIsFullAcid) { acidOp = getAcidType(tableDescriptor.getOutputFileFormatClass(), dest); AcidUtils.Operation acidOp = AcidUtils.Operation.NOT_ACID; if (destTableIsFullAcid) { acidOp = getAcidType(tableDescriptor.getOutputFileFormatClass(), dest);
/**
 * Lazily-resolved accessor for the output file format class: when unset and a
 * table descriptor exists, pull the class from the descriptor (caching it via
 * the setter) before returning.
 */
public Class<? extends OutputFormat> getOutputFileFormatClass() {
  if (outputFileFormatClass == null) {
    if (tableDesc != null) {
      setOutputFileFormatClass(tableDesc.getOutputFileFormatClass());
    }
  }
  return outputFileFormatClass;
}
/**
 * Returns the output file format class (a HiveOutputFormat subtype), resolving
 * it from the table descriptor on first access when not yet assigned.
 */
public Class<? extends HiveOutputFormat> getOutputFileFormatClass() {
  final boolean unresolved = outputFileFormatClass == null && tableDesc != null;
  if (unresolved) {
    setOutputFileFormatClass(tableDesc.getOutputFileFormatClass());
  }
  return outputFileFormatClass;
}
// Convenience overload: delegates to the class-based getHiveOutputFormat,
// passing the output file format class obtained from the table descriptor.
public static HiveOutputFormat<?, ?> getHiveOutputFormat(Configuration conf, TableDesc tableDesc) throws HiveException { return getHiveOutputFormat(conf, tableDesc.getOutputFileFormatClass()); }
/**
 * @return the output file format's fully-qualified class name, included in
 *         EXPLAIN output at USER, DEFAULT and EXTENDED verbosity levels.
 */
@Explain(displayName = "output format", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
public String getOutputFileFormatClassName() {
  final Class<?> clazz = getOutputFileFormatClass();
  return clazz.getName();
}
/**
 * Exposes the output file format class by name for EXPLAIN ("output format").
 */
@Explain(displayName = "output format")
public String getOutputFileFormatClassName() {
  final Class<?> outputFormat = getOutputFileFormatClass();
  return outputFormat.getName();
}
/**
 * Creates a RecordWriter for the given table by instantiating its configured
 * HiveOutputFormat and applying any compression settings requested by the
 * file sink descriptor.
 *
 * @param jc          base job configuration (left unmodified)
 * @param tableInfo   descriptor of the destination table; supplies the output
 *                    format class and table properties
 * @param outputClass the Writable value class the writer will receive
 * @param conf        file sink descriptor carrying the compressed flag,
 *                    codec class name, and compression type
 * @param outPath     target path for the written file
 * @return a RecordWriter bound to outPath
 * @throws HiveException wrapping any reflection, class-loading, or writer
 *                       creation failure (original cause preserved)
 */
public static RecordWriter getHiveRecordWriter(JobConf jc, TableDesc tableInfo,
    Class<? extends Writable> outputClass, FileSinkDesc conf, Path outPath)
    throws HiveException {
  try {
    HiveOutputFormat<?, ?> hiveOutputFormat =
        tableInfo.getOutputFileFormatClass().newInstance();
    boolean isCompressed = conf.getCompressed();
    JobConf jc_output = jc;
    if (isCompressed) {
      // Clone so compression settings do not leak into the caller's conf.
      jc_output = new JobConf(jc);
      String codecStr = conf.getCompressCodec();
      if (codecStr != null && !codecStr.trim().isEmpty()) {
        @SuppressWarnings("unchecked")
        Class<? extends CompressionCodec> codec =
            (Class<? extends CompressionCodec>) Class.forName(codecStr);
        FileOutputFormat.setOutputCompressorClass(jc_output, codec);
      }
      String type = conf.getCompressType();
      if (type != null && !type.trim().isEmpty()) {
        CompressionType style = CompressionType.valueOf(type);
        // BUG FIX: this was previously set on the original jc, but jc_output
        // was cloned from jc BEFORE this point, so the conf actually used to
        // create the writer never received the compression type (while the
        // codec above WAS set on jc_output). Set it on jc_output instead.
        SequenceFileOutputFormat.setOutputCompressionType(jc_output, style);
      }
    }
    return getRecordWriter(jc_output, hiveOutputFormat, outputClass,
        isCompressed, tableInfo.getProperties(), outPath);
  } catch (Exception e) {
    // Boundary catch: translate any failure into HiveException, keeping the cause.
    throw new HiveException(e);
  }
}
// NOTE(review): fragment — part of a larger method. Pulls per-alias table
// metadata from the work's partition info: table properties, the output file
// format class, and the non-native flag (presumably indicating a
// storage-handler-backed table — confirm against TableDesc).
TableDesc tableDesc = work.getAliasToPartnInfo().get(alias).getTableDesc(); props = tableDesc.getProperties(); outFileFormat = tableDesc.getOutputFileFormatClass(); nonNative = tableDesc.isNonNative();
// NOTE(review): fragment — the try block opens outside this view. Initializes
// the serializer from the table's properties, captures its serialized class,
// then reflectively instantiates the configured output format. Only
// SerDeException is handled here; reflection failures presumably propagate to
// an enclosing handler — confirm. Also: newInstance() is deprecated in
// modern JDKs (masks checked constructor exceptions).
serializer.initialize(null, tableInfo.getProperties()); outputClass = serializer.getSerializedClass(); hiveOutputFormat = conf.getTableInfo().getOutputFileFormatClass().newInstance(); } catch (SerDeException e) { throw new HiveException(e);