@Test
public void checkCanStoreMap() throws Exception {
  // Round-trip a RuntimeStatsMap through the persistence layer and verify
  // that both the signature keys and the map as a whole survive unchanged.
  Map<OpTreeSignature, OperatorStats> statsBySignature = new HashMap<>();
  statsBySignature.put(signatureFactory.getSignature(getTsOp(0)), new OperatorStats("ts0"));
  statsBySignature.put(signatureFactory.getSignature(getTsOp(1)), new OperatorStats("ts1"));

  RuntimeStatsMap original = new RuntimeStatsMap(statsBySignature);
  RuntimeStatsMap restored = persistenceLoop(original, RuntimeStatsMap.class);

  // Key equality is checked explicitly: signature equality is what makes the
  // restored map usable for lookups, not just top-level map equality.
  OpTreeSignature originalKey = original.toMap().keySet().iterator().next();
  OpTreeSignature restoredKey = restored.toMap().keySet().iterator().next();
  assertEquals(originalKey, restoredKey);
  assertEquals(original, restored);
}
/**
 * Replaces the estimated statistics of {@code op} with statistics recorded at
 * runtime during a previous execution, when such stats are available.
 *
 * <p>Returns the input {@code stats} unchanged when query re-execution is
 * disabled, when the stats source cannot provide stats for this operator
 * class, or when no recorded entry exists for the operator's tree signature.
 * Otherwise returns a clone of {@code stats} scaled to the recorded output
 * row count and flagged as runtime-derived.
 *
 * <p>Note: the operator is linked to its signature in the plan mapper even
 * when no runtime stats end up being applied.
 */
private static Statistics applyRuntimeStats(Context context, Statistics stats, Operator<?> op) {
  HiveConf conf = (HiveConf) context.getConf();
  if (!conf.getBoolVar(ConfVars.HIVE_QUERY_REEXECUTION_ENABLED)) {
    return stats;
  }

  PlanMapper planMapper = context.getPlanMapper();
  OpTreeSignature signature = planMapper.getSignatureOf(op);
  planMapper.link(op, signature);

  StatsSource source = context.getStatsSource();
  if (!source.canProvideStatsFor(op.getClass())) {
    return stats;
  }
  Optional<OperatorStats> recorded = source.lookup(signature);
  if (!recorded.isPresent()) {
    return stats;
  }

  LOG.debug("using runtime stats for {}; {}", op, recorded.get());
  // Clone before mutating so the caller's Statistics instance is untouched.
  Statistics adjusted = stats.clone();
  adjusted = adjusted.scaleToRowCount(recorded.get().getOutputRecords(), false);
  adjusted.setRuntimeStats(true);
  return adjusted;
}
if (tezCounter != null) { if (operatorStats == null) { operatorStats = new OperatorStats(operatorId); operatorStats.setOutputRecords(tezCounter.getValue());
@Test public void testStatsAreSetInReopt() throws Exception { IDriver driver = createDriver("overlay,reoptimize"); String query = "select assert_true_oom(${hiveconf:zzz} > sum(u*v))" + " from tu join tv on (tu.id_uv=tv.id_uv)" + " where u<10 and v>1"; PlanMapper pm = getMapperForQuery(driver, query); Iterator<EquivGroup> itG = pm.iterateGroups(); int checkedOperators = 0; while (itG.hasNext()) { EquivGroup g = itG.next(); List<FilterOperator> fos = g.getAll(FilterOperator.class); List<OperatorStats> oss = g.getAll(OperatorStats.class); // FIXME: oss seems to contain duplicates if (fos.size() > 0 && oss.size() > 0) { fos.sort(TestCounterMapping.OPERATOR_ID_COMPARATOR.reversed()); FilterOperator fo = fos.get(0); OperatorStats os = oss.get(0); Statistics stats = fo.getStatistics(); assertEquals(os.getOutputRecords(), stats.getNumRows()); if (!(os.getOutputRecords() == 3 || os.getOutputRecords() == 6)) { fail("nonexpected number of records produced"); } checkedOperators++; } } assertEquals(2, checkedOperators); }
@Test
@Ignore("needs HiveFilter mapping")
public void testMappingJoinLookup() throws ParseException {
  // For every equivalence group containing both an executable FilterOperator
  // and recorded OperatorStats, the corresponding Calcite HiveFilter should
  // also be linked into the group, and the recorded row counts should match
  // the expected values for this query.
  IDriver driver = createDriver();
  PlanMapper pm0 = getMapperForQuery(driver,
      "select sum(tu.id_uv),sum(u) from tu join tv on (tu.id_uv = tv.id_uv) where u>1 and v>1");

  Iterator<EquivGroup> groups = pm0.iterateGroups();
  int verifiedGroups = 0;
  while (groups.hasNext()) {
    EquivGroup group = groups.next();
    List<HiveFilter> calciteFilters = group.getAll(HiveFilter.class);
    List<OperatorStats> recordedStats = group.getAll(OperatorStats.class);
    List<FilterOperator> execFilters = group.getAll(FilterOperator.class);
    if (execFilters.isEmpty() || recordedStats.isEmpty()) {
      continue;
    }
    if (calciteFilters.isEmpty()) {
      fail("HiveFilter is not connected?");
    }
    OperatorStats recorded = recordedStats.get(0);
    if (recorded.getOutputRecords() != 3 && recorded.getOutputRecords() != 6) {
      fail("nonexpected number of records produced");
    }
    verifiedGroups++;
  }
  assertEquals(2, verifiedGroups);
}