/**
 * Builds a load descriptor targeting a static partition (or an unpartitioned
 * table) of {@code table}.
 *
 * @param sourcePath    directory containing the data files to load
 * @param table         descriptor of the destination table
 * @param partitionSpec static partition column/value pairs
 * @param replace       whether the load replaces existing data
 * @param writeType     the ACID write operation being performed
 */
public LoadTableDesc(final Path sourcePath,
    final org.apache.hadoop.hive.ql.plan.TableDesc table,
    final Map<String, String> partitionSpec,
    final boolean replace,
    final AcidUtils.Operation writeType) {
  super(sourcePath);
  // All field setup is centralized in init(); this overload only forwards.
  init(table, partitionSpec, replace, writeType);
}
public LoadTableDesc(final Path sourcePath, final org.apache.hadoop.hive.ql.plan.TableDesc table, final DynamicPartitionCtx dpCtx, final AcidUtils.Operation writeType) { super(sourcePath); this.dpCtx = dpCtx; if (dpCtx != null && dpCtx.getPartSpec() != null && partitionSpec == null) { init(table, dpCtx.getPartSpec(), true, writeType); } else { init(table, new LinkedHashMap<String, String>(), true, writeType); } }
/**
 * Builds a load descriptor for a dynamic-partition insert carrying an explicit
 * write id; {@code isReplace} selects between replace-all and overwrite load
 * semantics.
 *
 * @param sourcePath directory containing the data files to load
 * @param table      descriptor of the destination table
 * @param dpCtx      dynamic-partition context (may be null)
 * @param writeType  the ACID write operation being performed
 * @param isReplace  true to replace all existing data, false to overwrite
 * @param writeId    transactional write id for this load
 */
public LoadTableDesc(final Path sourcePath, final TableDesc table,
    final DynamicPartitionCtx dpCtx, final AcidUtils.Operation writeType,
    boolean isReplace, Long writeId) {
  super(sourcePath, writeType);
  if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {
    Utilities.FILE_OP_LOGGER.trace("creating LTD from " + sourcePath + " to " + table.getTableName());
  }
  this.dpCtx = dpCtx;
  final LoadFileType loadType;
  if (isReplace) {
    loadType = LoadFileType.REPLACE_ALL;
  } else {
    loadType = LoadFileType.OVERWRITE_EXISTING;
  }
  // NOTE(review): partitionSpec is the instance field, read before init() has
  // run — presumably still null here; original check kept verbatim.
  final Map<String, String> spec;
  if (dpCtx != null && dpCtx.getPartSpec() != null && partitionSpec == null) {
    spec = dpCtx.getPartSpec();
  } else {
    spec = new LinkedHashMap<String, String>();
  }
  init(table, spec, loadType, writeId);
}
public LoadTableDesc(final Path sourcePath, final TableDesc table, final Map<String, String> partitionSpec, final LoadFileType loadFileType, final AcidUtils.Operation writeType, Long currentWriteId) { super(sourcePath, writeType); if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) { Utilities.FILE_OP_LOGGER.trace("creating part LTD from " + sourcePath + " to " + ((table.getProperties() == null) ? "null" : table.getTableName())); } init(table, partitionSpec, loadFileType, currentWriteId); }
// NOTE(review): this constructor's signature (Path, TableDesc, Map, boolean,
// AcidUtils.Operation) is byte-identical to another constructor earlier in
// this file — two constructors with the same signature will not compile in one
// class. Presumably a merge/extraction artifact; confirm and remove one copy.
public LoadTableDesc(final Path sourcePath, final org.apache.hadoop.hive.ql.plan.TableDesc table, final Map<String, String> partitionSpec, final boolean replace, final AcidUtils.Operation writeType) { super(sourcePath); init(table, partitionSpec, replace, writeType); }
/**
 * Legacy String-path variant: builds a load descriptor from a source
 * directory and a temporary staging directory.
 *
 * @param sourceDir     directory containing the data files to load
 * @param tmpDir        temporary staging directory for the load
 * @param table         descriptor of the destination table
 * @param partitionSpec static partition column/value pairs
 * @param replace       whether the load replaces existing data
 */
public LoadTableDesc(final String sourceDir, final String tmpDir,
    final org.apache.hadoop.hive.ql.plan.TableDesc table,
    final Map<String, String> partitionSpec,
    final boolean replace) {
  super(sourceDir);
  // Forward everything to the shared init() overload.
  init(sourceDir, tmpDir, table, partitionSpec, replace);
}
// NOTE(review): this constructor's signature (Path, TableDesc,
// DynamicPartitionCtx, AcidUtils.Operation) is byte-identical to another
// constructor earlier in this file — duplicate constructor signatures will not
// compile in one class. Presumably a merge/extraction artifact; confirm and
// remove one copy.
public LoadTableDesc(final Path sourcePath, final org.apache.hadoop.hive.ql.plan.TableDesc table, final DynamicPartitionCtx dpCtx, final AcidUtils.Operation writeType) { super(sourcePath); this.dpCtx = dpCtx; if (dpCtx != null && dpCtx.getPartSpec() != null && partitionSpec == null) { init(table, dpCtx.getPartSpec(), true, writeType); } else { init(table, new LinkedHashMap<String, String>(), true, writeType); } }
public LoadTableDesc(final String sourceDir, final String tmpDir, final org.apache.hadoop.hive.ql.plan.TableDesc table, final DynamicPartitionCtx dpCtx) { super(sourceDir); this.dpCtx = dpCtx; if (dpCtx != null && dpCtx.getPartSpec() != null && partitionSpec == null) { init(sourceDir, tmpDir, table, dpCtx.getPartSpec(), true); } else { init(sourceDir, tmpDir, table, new LinkedHashMap<String, String>(), true); } }