public FilterCompat.Filter setFilter(final JobConf conf, MessageType schema) {
  SearchArgument sarg = ConvertAstToSearchArg.createFromConf(conf);
  if (sarg == null) {
    return null;
  }

  // Create the Parquet FilterPredicate without including columns that do not exist
  // on the schema (such as partition columns).
  FilterPredicate p = ParquetFilterPredicateConverter.toFilterPredicate(sarg, schema);
  if (p != null) {
    // Filter may have sensitive information. Do not send to debug.
    LOG.debug("PARQUET predicate push down generated.");
    ParquetInputFormat.setFilterPredicate(conf, p);
    return FilterCompat.get(p);
  } else {
    // Filter may have sensitive information. Do not send to debug.
    LOG.debug("No PARQUET predicate push down is generated.");
    return null;
  }
}
public void configure(HiveConf queryConfig) {
  this.conf = queryConfig;
  this.sarg = ConvertAstToSearchArg.createFromConf(conf);
  // Footer-cache PPD requires both predicate pushdown and the ORC
  // metastore footer-cache PPD flag to be enabled.
  this.isPpdEnabled = HiveConf.getBoolVar(conf, ConfVars.HIVEOPTINDEXFILTER)
      && HiveConf.getBoolVar(conf, ConfVars.HIVE_ORC_MS_FOOTER_CACHE_PPD);
  this.isInTest = HiveConf.getBoolVar(conf, ConfVars.HIVE_IN_TEST);
  // Reset the cached serialized forms of the SearchArgument.
  this.sargIsOriginal = this.sargNotIsOriginal = null;
}
static void setSearchArgument(Reader.Options options, List<OrcProto.Type> types,
    Configuration conf, boolean isOriginal) {
  String neededColumnNames = getNeededColumnNamesString(conf);
  if (neededColumnNames == null) {
    // Without the needed column names the sarg leaves cannot be mapped to
    // ORC columns, so skip predicate pushdown.
    LOG.debug("No ORC pushdown predicate - no column names");
    options.searchArgument(null, null);
    return;
  }
  SearchArgument sarg = ConvertAstToSearchArg.createFromConf(conf);
  if (sarg == null) {
    LOG.debug("No ORC pushdown predicate");
    options.searchArgument(null, null);
    return;
  }
  if (LOG.isInfoEnabled()) {
    LOG.info("ORC pushdown predicate: " + sarg);
  }
  options.searchArgument(sarg, getSargColumnNames(
      neededColumnNames.split(","), types, options.getInclude(), isOriginal));
}
this.conf = conf;
this.forceThreadpool = HiveConf.getBoolVar(conf, ConfVars.HIVE_IN_TEST);
this.sarg = ConvertAstToSearchArg.createFromConf(conf);
// Fall back to the default split sizes when none are configured.
minSize = HiveConf.getLongVar(conf, ConfVars.MAPREDMINSPLITSIZE, DEFAULT_MIN_SPLIT_SIZE);
maxSize = HiveConf.getLongVar(conf, ConfVars.MAPREDMAXSPLITSIZE, DEFAULT_MAX_SPLIT_SIZE);
this.split = split;
// Read the pushdown predicate and the Tez fragment/DAG identifiers
// from the job configuration.
this.sarg = ConvertAstToSearchArg.createFromConf(job);
final String fragmentId = LlapTezUtils.getFragmentId(job);
final String dagId = LlapTezUtils.getDagId(job);