@Override
public PartitionDesc clone() {
  PartitionDesc ret = new PartitionDesc();

  ret.inputFileFormatClass = inputFileFormatClass;
  ret.outputFileFormatClass = outputFileFormatClass;
  if (properties != null) {
    ret.setProperties((Properties) properties.clone());
  }
  ret.tableDesc = (TableDesc) tableDesc.clone();
  // The partition spec is not present
  if (partSpec != null) {
    ret.partSpec = new LinkedHashMap<>(partSpec);
  }
  if (vectorPartitionDesc != null) {
    ret.vectorPartitionDesc = vectorPartitionDesc.clone();
  }
  return ret;
}
@Override
public PartitionDesc clone() {
  PartitionDesc ret = new PartitionDesc();

  ret.inputFileFormatClass = inputFileFormatClass;
  ret.outputFileFormatClass = outputFileFormatClass;
  if (properties != null) {
    // Copy the explicit property entries one by one; unlike Properties.clone(),
    // this does not carry over a defaults table, if one is set.
    Properties newProp = new Properties();
    Enumeration<Object> keysProp = properties.keys();
    while (keysProp.hasMoreElements()) {
      Object key = keysProp.nextElement();
      newProp.put(key, properties.get(key));
    }
    ret.setProperties(newProp);
  }
  ret.tableDesc = (TableDesc) tableDesc.clone();
  // The partition spec is not present
  if (partSpec != null) {
    ret.partSpec = new LinkedHashMap<>(partSpec);
  }
  if (vectorPartitionDesc != null) {
    ret.vectorPartitionDesc = vectorPartitionDesc.clone();
  }
  return ret;
}
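The two PartitionDesc.clone() variants above differ only in how the table properties are copied. A minimal, JDK-only sketch (not Hive code; the class name and property keys below are illustrative) of the practical difference: Properties.clone() keeps the reference to any defaults table, whereas copying the entries returned by keys() drops the defaults chain.

import java.util.Collections;
import java.util.Properties;

public class PropertiesCopyDemo {
  public static void main(String[] args) {
    Properties defaults = new Properties();
    defaults.setProperty("serialization.format", "1");

    Properties props = new Properties(defaults); // backed by a defaults table
    props.setProperty("columns", "a,b");

    // clone() copies the explicit entries and keeps the same defaults reference.
    Properties cloned = (Properties) props.clone();
    System.out.println(cloned.getProperty("serialization.format")); // "1"

    // Copying only the keys() enumeration loses the defaults chain.
    Properties copied = new Properties();
    for (Object key : Collections.list(props.keys())) {
      copied.put(key, props.get(key));
    }
    System.out.println(copied.getProperty("serialization.format")); // null
  }
}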
@Override
public Object clone() {
  ReduceSinkDesc desc = new ReduceSinkDesc();

  // Deep-copy the expression and output-column-name lists.
  desc.setKeyCols((ArrayList<ExprNodeDesc>) getKeyCols().clone());
  desc.setValueCols((ArrayList<ExprNodeDesc>) getValueCols().clone());
  desc.setOutputKeyColumnNames((ArrayList<String>) getOutputKeyColumnNames().clone());

  // Copy the nested distinct-column indices list by list.
  List<List<Integer>> distinctColumnIndicesClone = new ArrayList<List<Integer>>();
  for (List<Integer> distinctColumnIndex : getDistinctColumnIndices()) {
    List<Integer> tmp = new ArrayList<Integer>();
    tmp.addAll(distinctColumnIndex);
    distinctColumnIndicesClone.add(tmp);
  }
  desc.setDistinctColumnIndices(distinctColumnIndicesClone);

  desc.setOutputValueColumnNames((ArrayList<String>) getOutputValueColumnNames().clone());
  desc.setNumDistributionKeys(getNumDistributionKeys());
  desc.setTag(getTag());
  desc.setNumReducers(getNumReducers());
  desc.setPartitionCols((ArrayList<ExprNodeDesc>) getPartitionCols().clone());
  desc.setKeySerializeInfo((TableDesc) getKeySerializeInfo().clone());
  desc.setValueSerializeInfo((TableDesc) getValueSerializeInfo().clone());
  desc.setNumBuckets(numBuckets);
  // bucketCols and statistics are copied by reference, not cloned.
  desc.setBucketCols(bucketCols);
  desc.setStatistics(this.getStatistics());
  desc.setSkipTag(skipTag);
  desc.reduceTraits = reduceTraits.clone();
  desc.setDeduplicated(isDeduplicated);
  desc.setHasOrderBy(hasOrderBy);
  desc.outputName = outputName;
  return desc;
}
desc.setNumReducers(getNumReducers());
desc.setPartitionCols((ArrayList<ExprNodeDesc>) getPartitionCols().clone());
desc.setKeySerializeInfo((TableDesc) getKeySerializeInfo().clone());
desc.setValueSerializeInfo((TableDesc) getValueSerializeInfo().clone());
desc.setNumBuckets(numBuckets);
desc.setBucketCols(bucketCols);
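In the ReduceSinkDesc.clone() above, the expression lists and the serialize-info TableDescs are cloned, but bucketCols and the statistics object are assigned by reference. A small, self-contained sketch (plain JDK collections, hypothetical names) of why that distinction matters: a mutation to a reference-copied structure shows through the copy, while an element-wise copy is unaffected.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ShallowVsDeepCopyDemo {
  public static void main(String[] args) {
    List<List<Integer>> original = new ArrayList<>();
    original.add(new ArrayList<>(Arrays.asList(1, 2)));

    // Reference copy: both variables point at the same nested lists.
    List<List<Integer>> shared = original;

    // Element-wise copy, analogous to the distinctColumnIndices loop above.
    List<List<Integer>> deep = new ArrayList<>();
    for (List<Integer> inner : original) {
      deep.add(new ArrayList<>(inner));
    }

    original.get(0).add(3);
    System.out.println(shared.get(0)); // [1, 2, 3] -- change is visible
    System.out.println(deep.get(0));   // [1, 2]    -- unaffected
  }
}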
MapredWork currPlan = (MapredWork) currTask.getWork();
TableDesc keyTblDesc = (TableDesc) currPlan.getReduceWork().getKeyDesc().clone();
List<String> joinKeys = Utilities.getColumnNames(keyTblDesc.getProperties());
ret.setKeyTableDesc((TableDesc) getKeyTableDesc().clone());
ret.setTagOrder(getTagOrder().clone());
if (getKeyTableDesc() != null) {
  ret.setKeyTableDesc((TableDesc) getKeyTableDesc().clone());
}
TableDesc keyTblDesc = (TableDesc) reduceWork.getKeyDesc().clone();
List<String> joinKeys = Utilities.getColumnNames(keyTblDesc.getProperties());
MapredWork currPlan = (MapredWork) currTask.getWork();
TableDesc keyTblDesc = (TableDesc) currPlan.getReduceWork().getKeyDesc().clone();
List<String> joinKeys = Utilities.getColumnNames(keyTblDesc.getProperties());
TableDesc keyTblDesc = (TableDesc) reduceWork.getKeyDesc().clone();
List<String> joinKeys = Utilities.getColumnNames(keyTblDesc.getProperties());
TableDesc ts = (TableDesc) fsInputDesc.getTableInfo().clone();
FileSinkDesc fsOutputDesc = new FileSinkDesc(finalName, ts,
    conf.getBoolVar(ConfVars.COMPRESSRESULT));
TableDesc ts = (TableDesc) fsInputDesc.getTableInfo().clone();
Path mergeDest = srcMmWriteId == null ? finalName : finalName.getParent();
fsOutputDesc = new FileSinkDesc(mergeDest, ts,
    conf.getBoolVar(ConfVars.COMPRESSRESULT));
@Override
public PartitionDesc clone() {
  PartitionDesc ret = new PartitionDesc();

  ret.inputFileFormatClass = inputFileFormatClass;
  ret.outputFileFormatClass = outputFileFormatClass;
  if (properties != null) {
    Properties newProp = new Properties();
    Enumeration<Object> keysProp = properties.keys();
    while (keysProp.hasMoreElements()) {
      Object key = keysProp.nextElement();
      newProp.put(key, properties.get(key));
    }
    ret.setProperties(newProp);
  }
  ret.tableDesc = (TableDesc) tableDesc.clone();
  // The partition spec is not present
  if (partSpec != null) {
    ret.partSpec = new java.util.LinkedHashMap<String, String>();
    ret.partSpec.putAll(partSpec);
  }
  return ret;
}
@Override
public PartitionDesc clone() {
  PartitionDesc ret = new PartitionDesc();

  ret.setSerdeClassName(serdeClassName);
  ret.setDeserializerClass(deserializerClass);
  ret.inputFileFormatClass = inputFileFormatClass;
  ret.outputFileFormatClass = outputFileFormatClass;
  if (properties != null) {
    Properties newProp = new Properties();
    Enumeration<Object> keysProp = properties.keys();
    while (keysProp.hasMoreElements()) {
      Object key = keysProp.nextElement();
      newProp.put(key, properties.get(key));
    }
    ret.setProperties(newProp);
  }
  ret.tableDesc = (TableDesc) tableDesc.clone();
  // The partition spec is not present
  if (partSpec != null) {
    ret.partSpec = new java.util.LinkedHashMap<String, String>();
    ret.partSpec.putAll(partSpec);
  }
  return ret;
}
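The earlier PartitionDesc.clone() variants copy the partition spec through the LinkedHashMap copy constructor, while the two above use an explicit putAll; both produce a shallow copy with the same insertion order. A tiny JDK-only sketch (hypothetical partition keys) of that equivalence:

import java.util.LinkedHashMap;
import java.util.Map;

public class PartSpecCopyDemo {
  public static void main(String[] args) {
    Map<String, String> partSpec = new LinkedHashMap<>();
    partSpec.put("ds", "2024-01-01");
    partSpec.put("hr", "12");

    // Constructor copy, as in the newer clone().
    Map<String, String> viaCtor = new LinkedHashMap<>(partSpec);

    // Explicit putAll, as in the older clone().
    Map<String, String> viaPutAll = new LinkedHashMap<>();
    viaPutAll.putAll(partSpec);

    System.out.println(viaCtor.equals(viaPutAll)); // true -- identical entries
  }
}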
@Override
public Object clone() {
  ReduceSinkDesc desc = new ReduceSinkDesc();

  desc.setKeyCols((ArrayList<ExprNodeDesc>) getKeyCols().clone());
  desc.setValueCols((ArrayList<ExprNodeDesc>) getValueCols().clone());
  desc.setOutputKeyColumnNames((ArrayList<String>) getOutputKeyColumnNames().clone());
  List<List<Integer>> distinctColumnIndicesClone = new ArrayList<List<Integer>>();
  for (List<Integer> distinctColumnIndex : getDistinctColumnIndices()) {
    List<Integer> tmp = new ArrayList<Integer>();
    tmp.addAll(distinctColumnIndex);
    distinctColumnIndicesClone.add(tmp);
  }
  desc.setDistinctColumnIndices(distinctColumnIndicesClone);
  desc.setOutputValueColumnNames((ArrayList<String>) getOutputValueColumnNames().clone());
  desc.setNumDistributionKeys(getNumDistributionKeys());
  desc.setTag(getTag());
  desc.setNumReducers(getNumReducers());
  desc.setPartitionCols((ArrayList<ExprNodeDesc>) getPartitionCols().clone());
  desc.setKeySerializeInfo((TableDesc) getKeySerializeInfo().clone());
  desc.setValueSerializeInfo((TableDesc) getValueSerializeInfo().clone());
  desc.setNumBuckets(numBuckets);
  desc.setBucketCols(bucketCols);
  desc.setStatistics(this.getStatistics());
  desc.setSkipTag(skipTag);
  desc.reduceTraits = reduceTraits.clone();
  desc.setEnforceSort(enforceSort);
  desc.setHasOrderBy(hasOrderBy);
  return desc;
}
ret.setTagOrder(getTagOrder().clone());
if (getKeyTableDesc() != null) {
  ret.setKeyTableDesc((TableDesc) getKeyTableDesc().clone());
}
TableDesc keyTblDesc = (TableDesc) reduceWork.getKeyDesc().clone();
List<String> joinKeys = Utilities.getColumnNames(keyTblDesc.getProperties());
new RowSchema(out_rwsch.getColumnInfos()));
TableDesc ts = (TableDesc) fsConf.getTableInfo().clone();
// Note: the partition-columns property is removed from the original tableInfo,
// not from the clone held in ts.
fsConf.getTableInfo().getProperties().remove(
    org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_PARTITION_COLUMNS);
TableDesc ts = (TableDesc) fsInputDesc.getTableInfo().clone();
FileSinkDesc fsOutputDesc = new FileSinkDesc(finalName, ts,
    parseCtx.getConf().getBoolVar(HiveConf.ConfVars.COMPRESSRESULT));
TableDesc ts = (TableDesc) fsInputDesc.getTableInfo().clone();
FileSinkDesc fsOutputDesc = new FileSinkDesc(finalName, ts,
    conf.getBoolVar(ConfVars.COMPRESSRESULT));