// NOTE(review): this line is a corrupted/truncated fragment of the partition-spec
// gathering logic — observe the gap at "} else { .getPartColumns().size()," where
// the head of the copy call (cf. ObjectInspectorUtils.partialCopyToStandardObject
// on a sibling line) and several enclosing braces are missing. It cannot compile
// as-is; recover the original text from version control before editing. Left
// byte-identical below.
String partitionSpecs; inputFileChanged = false; if (conf.getPartColumns() == null || conf.getPartColumns().size() == 0) { partitionSpecs = ""; // non-partitioned } else { assert inputObjInspectors[0].getCategory() == ObjectInspector.Category.STRUCT : "input object inspector is not struct"; writable = new ArrayList<Object>(conf.getPartColumns().size()); values = new ArrayList<String>(conf.getPartColumns().size()); dpStartCol = 0; StructObjectInspector soi = (StructObjectInspector) inputObjInspectors[0]; for (StructField sf : soi.getAllStructFieldRefs()) { String fn = sf.getFieldName(); if (!conf.getPartColumns().contains(fn)) { dpStartCol++; } else { .getPartColumns().size(), (StructObjectInspector) inputObjInspectors[0], ObjectInspectorCopyOption.WRITABLE); partitionSpecs = FileUtils.makePartName(conf.getPartColumns(), values); if (LOG.isInfoEnabled()) { LOG.info("Stats Gathering found a new partition spec = " + partitionSpecs);
// NOTE(review): corrupted/truncated duplicate of the partition-spec gathering
// fragment on the line above; the only visible difference is that this variant
// guards logging with an "isLogInfoEnabled" field instead of LOG.isInfoEnabled().
// Same mid-expression gap at "} else { .getPartColumns().size()," — does not
// compile; recover from version control. Left byte-identical below.
String partitionSpecs; inputFileChanged = false; if (conf.getPartColumns() == null || conf.getPartColumns().size() == 0) { partitionSpecs = ""; // non-partitioned } else { assert inputObjInspectors[0].getCategory() == ObjectInspector.Category.STRUCT : "input object inspector is not struct"; writable = new ArrayList<Object>(conf.getPartColumns().size()); values = new ArrayList<String>(conf.getPartColumns().size()); dpStartCol = 0; StructObjectInspector soi = (StructObjectInspector) inputObjInspectors[0]; for (StructField sf : soi.getAllStructFieldRefs()) { String fn = sf.getFieldName(); if (!conf.getPartColumns().contains(fn)) { dpStartCol++; } else { .getPartColumns().size(), (StructObjectInspector) inputObjInspectors[0], ObjectInspectorCopyOption.WRITABLE); partitionSpecs = FileUtils.makePartName(conf.getPartColumns(), values); if (isLogInfoEnabled) { LOG.info("Stats Gathering found a new partition spec = " + partitionSpecs);
@Override protected void initializeOp(Configuration hconf) throws HiveException { initializeChildren(hconf); inputFileChanged = false; if (conf == null) { return; } if (!conf.isGatherStats()) { return; } this.hconf = hconf; if (hconf instanceof JobConf) { jc = (JobConf) hconf; } else { // test code path jc = new JobConf(hconf, ExecDriver.class); } stat = null; partitionSpecs = null; if (conf.getPartColumns() == null || conf.getPartColumns().size() == 0) { // NON PARTITIONED table return; } }
// NOTE(review): another corrupted variant of the partition-spec gathering logic.
// This one shows the full ObjectInspectorUtils.partialCopyToStandardObject(...)
// call that the sibling fragments are missing, but it is itself truncated: the
// assert's left-hand side before "input object inspector is not struct" is gone,
// the for-loop and else-branch are never closed, and "o" in values.add(o.toString())
// has no visible declaration. Does not compile; recover from version control.
// Left byte-identical below.
inputFileChanged = false; stat = new Stat(); if (conf.getPartColumns() == null || conf.getPartColumns().size() == 0) { partitionSpecs = ""; } else { "input object inspector is not struct"; writable = new ArrayList<Object>(conf.getPartColumns().size()); values = new ArrayList<String>(conf.getPartColumns().size()); dpStartCol = 0; StructObjectInspector soi = (StructObjectInspector) inputObjInspectors[0]; for (StructField sf: soi.getAllStructFieldRefs()) { String fn = sf.getFieldName(); if (!conf.getPartColumns().contains(fn)) { dpStartCol++; } else { ObjectInspectorUtils.partialCopyToStandardObject(writable, row, dpStartCol, conf.getPartColumns().size(), (StructObjectInspector) inputObjInspectors[0], ObjectInspectorCopyOption.WRITABLE); values.add(o.toString()); partitionSpecs = FileUtils.makePartName(conf.getPartColumns(), values); LOG.info("Stats Gathering found a new partition spec = " + partitionSpecs);
@Override protected Collection<Future<?>> initializeOp(Configuration hconf) throws HiveException { Collection<Future<?>> result = super.initializeOp(hconf); inputFileChanged = false; if (conf == null) { return result; } rowLimit = conf.getRowLimit(); if (!conf.isGatherStats()) { return result; } this.hconf = hconf; if (hconf instanceof JobConf) { jc = (JobConf) hconf; } else { // test code path jc = new JobConf(hconf); } defaultPartitionName = HiveConf.getVar(hconf, HiveConf.ConfVars.DEFAULTPARTITIONNAME); currentStat = null; stats = new HashMap<String, Stat>(); if (conf.getPartColumns() == null || conf.getPartColumns().size() == 0) { // NON PARTITIONED table return result; } return result; }
// NOTE(review): byte-for-byte duplicate of the corrupted partition-spec fragment
// on an earlier line (the "isLogInfoEnabled" variant), with the same truncation
// at "} else { .getPartColumns().size(),". Does not compile; deduplicate and
// recover the original text from version control. Left byte-identical below.
String partitionSpecs; inputFileChanged = false; if (conf.getPartColumns() == null || conf.getPartColumns().size() == 0) { partitionSpecs = ""; // non-partitioned } else { assert inputObjInspectors[0].getCategory() == ObjectInspector.Category.STRUCT : "input object inspector is not struct"; writable = new ArrayList<Object>(conf.getPartColumns().size()); values = new ArrayList<String>(conf.getPartColumns().size()); dpStartCol = 0; StructObjectInspector soi = (StructObjectInspector) inputObjInspectors[0]; for (StructField sf : soi.getAllStructFieldRefs()) { String fn = sf.getFieldName(); if (!conf.getPartColumns().contains(fn)) { dpStartCol++; } else { .getPartColumns().size(), (StructObjectInspector) inputObjInspectors[0], ObjectInspectorCopyOption.WRITABLE); partitionSpecs = FileUtils.makePartName(conf.getPartColumns(), values); if (isLogInfoEnabled) { LOG.info("Stats Gathering found a new partition spec = " + partitionSpecs);