@Override public PartitionDesc clone() { PartitionDesc ret = new PartitionDesc(); ret.inputFileFormatClass = inputFileFormatClass; ret.outputFileFormatClass = outputFileFormatClass; if (properties != null) { ret.setProperties((Properties) properties.clone()); } ret.tableDesc = (TableDesc) tableDesc.clone(); // The partition spec is not present if (partSpec != null) { ret.partSpec = new LinkedHashMap<>(partSpec); } if (vectorPartitionDesc != null) { ret.vectorPartitionDesc = vectorPartitionDesc.clone(); } return ret; }
/**
 * @param part Partition
 * @param tblDesc Table Descriptor
 * @param usePartSchemaProperties Use Partition Schema Properties to set the
 * partition descriptor properties. This is usually set to true by the caller
 * if the table is partitioned, i.e. if the table has partition columns.
 * @throws HiveException propagated from the helper / metadata calls
 */
public PartitionDesc(final Partition part, final TableDesc tblDesc,
    boolean usePartSchemaProperties) throws HiveException {
  PartitionDescConstructorHelper(part, tblDesc, usePartSchemaProperties);
  if (!usePartSchemaProperties) {
    // Derive the (large) per-partition properties from the table schema.
    setProperties(part.getSchemaFromTableSchema(tblDesc.getProperties()));
  } else {
    // Caller asked for partition-schema metadata directly.
    setProperties(part.getMetadataFromPartitionSchema());
  }
}
@Override public PartitionDesc clone() { PartitionDesc ret = new PartitionDesc(); ret.inputFileFormatClass = inputFileFormatClass; ret.outputFileFormatClass = outputFileFormatClass; if (properties != null) { Properties newProp = new Properties(); Enumeration<Object> keysProp = properties.keys(); while (keysProp.hasMoreElements()) { Object key = keysProp.nextElement(); newProp.put(key, properties.get(key)); } ret.setProperties(newProp); } ret.tableDesc = (TableDesc) tableDesc.clone(); // The partition spec is not present if (partSpec != null) { ret.partSpec = new LinkedHashMap<>(partSpec); } if (vectorPartitionDesc != null) { ret.vectorPartitionDesc = vectorPartitionDesc.clone(); } return ret; }
/**
 * @param part Partition
 * @param tblDesc Table Descriptor
 * @param usePartSchemaProperties Use Partition Schema Properties to set the
 * partition descriptor properties. This is usually set to true by the caller
 * if the table is partitioned, i.e. if the table has partition columns.
 * @throws HiveException propagated from the helper / metadata calls
 */
public PartitionDesc(final Partition part, final TableDesc tblDesc,
    boolean usePartSchemaProperties) throws HiveException {
  PartitionDescConstructorHelper(part, tblDesc, usePartSchemaProperties);
  // Partition-schema metadata when requested; otherwise the (large)
  // per-partition properties derived from the table schema.
  setProperties(usePartSchemaProperties
      ? part.getMetadataFromPartitionSchema()
      : part.getSchemaFromTableSchema(tblDesc.getProperties()));
}
/**
 * Builds a descriptor for {@code part} against an explicitly supplied table
 * descriptor; the helper is always invoked with partition-schema properties
 * enabled ({@code true}).
 *
 * @param part the partition being described
 * @param tableDesc descriptor of the owning table
 * @throws HiveException propagated from the helper / metadata calls
 */
public PartitionDesc(final Partition part, final TableDesc tableDesc) throws HiveException {
  PartitionDescConstructorHelper(part, tableDesc, true);
  if (Utilities.isInputFileFormatSelfDescribing(this)) {
    // if IF is self describing no need to send column info per partition, since its not used anyway.
    Table tbl = part.getTable();
    // Column-free schema built from the storage descriptor + partition parameters.
    setProperties(MetaStoreUtils.getSchemaWithoutCols(part.getTPartition().getSd(),
        part.getParameters(), tbl.getDbName(), tbl.getTableName(), tbl.getPartitionKeys()));
  } else {
    setProperties(part.getMetadataFromPartitionSchema());
  }
}
/**
 * Wires up the given TableScanOperator children (empty-input case) so each
 * child still receives an ObjectInspector derived from its table's
 * deserializer, and registers this operator as their parent.
 *
 * @param children table-scan children to attach under this operator
 * @param hconf base configuration; per-table copies come from
 *        {@code cloneConfsForNestedColPruning}
 * @throws SerDeException if the deserializer cannot supply an ObjectInspector
 * @throws Exception propagated from object-inspector initialization
 */
public void initEmptyInputChildren(List<Operator<?>> children, Configuration hconf)
    throws SerDeException, Exception {
  setChildOperators(children);
  Map<String, Configuration> tableNameToConf = cloneConfsForNestedColPruning(hconf);
  for (Operator<?> child : children) {
    TableScanOperator tsOp = (TableScanOperator) child;
    StructObjectInspector soi = null;
    // Partition info is looked up by the child's alias.
    PartitionDesc partDesc = conf.getAliasToPartnInfo().get(tsOp.getConf().getAlias());
    Configuration newConf = tableNameToConf.get(partDesc.getTableDesc().getTableName());
    Deserializer serde = partDesc.getTableDesc().getDeserializer();
    // NOTE(review): reads like a no-op, but presumably forces lazy property
    // materialization inside PartitionDesc — confirm before removing.
    partDesc.setProperties(partDesc.getProperties());
    MapOpCtx opCtx = new MapOpCtx(tsOp.getConf().getAlias(), child, partDesc);
    StructObjectInspector tableRowOI = (StructObjectInspector) serde.getObjectInspector();
    initObjectInspector(newConf, opCtx, tableRowOI);
    soi = opCtx.rowObjectInspector;
    // Register ourselves as the child's parent and record its row inspector.
    child.getParentOperators().add(this);
    childrenOpToOI.put(child, soi);
  }
}
/**
 * Builds a descriptor for {@code part} using a table descriptor derived from
 * the partition's own table; the helper is always invoked with
 * partition-schema properties enabled ({@code true}).
 *
 * @param part the partition being described
 * @throws HiveException propagated from the helper / metadata calls
 */
public PartitionDesc(final Partition part) throws HiveException {
  PartitionDescConstructorHelper(part, getTableDesc(part.getTable()), true);
  if (Utilities.isInputFileFormatSelfDescribing(this)) {
    // if IF is self describing no need to send column info per partition, since its not used anyway.
    Table tbl = part.getTable();
    // NOTE(review): the storage descriptor is passed twice here — verify this
    // matches the MetaStoreUtils.getSchemaWithoutCols overload in scope.
    setProperties(MetaStoreUtils.getSchemaWithoutCols(part.getTPartition().getSd(),
        part.getTPartition().getSd(), part.getParameters(), tbl.getDbName(),
        tbl.getTableName(), tbl.getPartitionKeys()));
  } else {
    setProperties(part.getMetadataFromPartitionSchema());
  }
}
/**
 * Wires up the given TableScanOperator children (empty-input case) so each
 * child still receives an ObjectInspector derived from its table's
 * deserializer, and registers this operator as their parent.
 *
 * @param children table-scan children to attach under this operator
 * @param hconf base configuration; per-table copies come from
 *        {@code cloneConfsForNestedColPruning}
 * @throws SerDeException if the deserializer cannot supply an ObjectInspector
 * @throws Exception propagated from object-inspector initialization
 */
public void initEmptyInputChildren(List<Operator<?>> children, Configuration hconf)
    throws SerDeException, Exception {
  setChildOperators(children);
  Map<String, Configuration> tableNameToConf = cloneConfsForNestedColPruning(hconf);
  for (Operator<?> child : children) {
    TableScanOperator tsOp = (TableScanOperator) child;
    StructObjectInspector soi = null;
    // Partition info is looked up by the child's alias.
    PartitionDesc partDesc = conf.getAliasToPartnInfo().get(tsOp.getConf().getAlias());
    Configuration newConf = tableNameToConf.get(partDesc.getTableDesc().getTableName());
    Deserializer serde = partDesc.getTableDesc().getDeserializer();
    // NOTE(review): reads like a no-op, but presumably forces lazy property
    // materialization inside PartitionDesc — confirm before removing.
    partDesc.setProperties(partDesc.getProperties());
    MapOpCtx opCtx = new MapOpCtx(tsOp.getConf().getAlias(), child, partDesc);
    StructObjectInspector tableRowOI = (StructObjectInspector) serde.getObjectInspector();
    initObjectInspector(newConf, opCtx, tableRowOI);
    soi = opCtx.rowObjectInspector;
    // Register ourselves as the child's parent and record its row inspector.
    child.getParentOperators().add(this);
    childrenOpToOI.put(child, soi);
  }
}
@Override public PartitionDesc clone() { PartitionDesc ret = new PartitionDesc(); ret.inputFileFormatClass = inputFileFormatClass; ret.outputFileFormatClass = outputFileFormatClass; if (properties != null) { Properties newProp = new Properties(); Enumeration<Object> keysProp = properties.keys(); while (keysProp.hasMoreElements()) { Object key = keysProp.nextElement(); newProp.put(key, properties.get(key)); } ret.setProperties(newProp); } ret.tableDesc = (TableDesc) tableDesc.clone(); // The partition spec is not present if (partSpec != null) { ret.partSpec = new java.util.LinkedHashMap<String, String>(); ret.partSpec.putAll(partSpec); } return ret; }
@Override public PartitionDesc clone() { PartitionDesc ret = new PartitionDesc(); ret.setSerdeClassName(serdeClassName); ret.setDeserializerClass(deserializerClass); ret.inputFileFormatClass = inputFileFormatClass; ret.outputFileFormatClass = outputFileFormatClass; if (properties != null) { Properties newProp = new Properties(); Enumeration<Object> keysProp = properties.keys(); while (keysProp.hasMoreElements()) { Object key = keysProp.nextElement(); newProp.put(key, properties.get(key)); } ret.setProperties(newProp); } ret.tableDesc = (TableDesc) tableDesc.clone(); // The partition spec is not present if (partSpec != null) { ret.partSpec = new java.util.LinkedHashMap<String, String>(); ret.partSpec.putAll(partSpec); } return ret; }
/**
 * Builds a descriptor for {@code part} against an explicitly supplied table
 * descriptor, deriving the (large) per-partition properties from the table
 * schema.
 *
 * @param part the partition being described
 * @param tblDesc descriptor of the owning table
 * @throws HiveException propagated from partition/metadata access
 */
public PartitionDesc(final Partition part, final TableDesc tblDesc) throws HiveException {
  this.tableDesc = tblDesc;
  // each partition maintains a large properties
  setProperties(part.getSchemaFromTableSchema(tblDesc.getProperties()));
  partSpec = part.getSpec();
  // BUG FIX: the input format was previously assigned via
  // setOutputFileFormatClass, leaving inputFileFormatClass unset and
  // transiently clobbering the output format with the input format.
  setInputFileFormatClass(part.getInputFormatClass());
  setOutputFileFormatClass(part.getOutputFormatClass());
}
public PartitionDesc(final Partition part) throws HiveException { this.tableDesc = Utilities.getTableDesc(part.getTable()); setProperties(part.getMetadataFromPartitionSchema()); partSpec = part.getSpec(); setInputFileFormatClass(part.getInputFormatClass()); setOutputFileFormatClass(part.getOutputFormatClass()); }
Properties target = aliasPartnDesc.getProperties(); if (target == null) { aliasPartnDesc.setProperties(target = new Properties());