@Override
public OptionValue getDefault() {
  // Pure delegation: the wrapped option manager owns the default value.
  return inner.getDefault();
}
@Override public Iterator<OptionValue> iterator() { final Map<String, OptionValue> buildList = CaseInsensitiveMap.newHashMap(); // populate the default options for (final Map.Entry<String, OptionValidator> entry : validators.entrySet()) { buildList.put(entry.getKey(), entry.getValue().getDefault()); } // override if changed for (final Map.Entry<String, OptionValue> entry : Lists.newArrayList(options.getAll())) { buildList.put(entry.getKey(), entry.getValue()); } return buildList.values().iterator(); }
@Override public OptionValue getOption(final String name) { // check local space (persistent store) final OptionValue value = options.get(name); if (value != null) { return value; } // otherwise, return default. final OptionValidator validator = getValidator(name); return validator.getDefault(); }
// Returns the locally-stored value for a short-lived (query-scoped) option,
// expiring it when its validity window has passed. Side effects: an expired
// option is removed from both the option store and the short-lived registry.
@Override
OptionValue getLocalOption(final String name) {
  final OptionValue value = super.getLocalOption(name);
  if (shortLivedOptions.containsKey(name)) {
    // Still inside the option's validity window: the stored value applies.
    if (withinRange(name)) {
      return value;
    }
    final int queryNumber = session.getQueryCount();
    final int start = shortLivedOptions.get(name).getLeft();
    // option is not in effect if queryNumber < start
    if (queryNumber < start) {
      return getValidator(name).getDefault();
      // reset if queryNumber <= end
    } else {
      // Window has ended: purge the option so it no longer shadows anything.
      options.remove(name);
      shortLivedOptions.remove(name);
      return null; // fallback takes effect
    }
  }
  // Not short-lived: return whatever the parent lookup produced (may be null).
  return value;
}
@Override public void setOption(final OptionValue value) { checkArgument(value.getType() == OptionType.SYSTEM, "OptionType must be SYSTEM."); final String name = value.getName().toLowerCase(); final OptionValidator validator = getValidator(name); validator.validate(value); // validate the option if (options.get(name) == null && value.equals(validator.getDefault())) { return; // if the option is not overridden, ignore setting option to default } options.put(name, value); }
switch(validator.getDefault().getKind()){ case BOOLEAN: this.setOption(OptionValue.createBoolean(OptionType.SYSTEM, validator.getOptionName(), Boolean.parseBoolean(value)));
@Test
public void testTimestampNulls() throws Exception {
  final String option = HivePluginOptions.HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS;
  try {
    // Force the native parquet reader path, then scan a table with null timestamps.
    test(String.format("alter session set \"%s\" = true", option));
    test("SELECT * FROM hive.parquet_timestamp_nulls");
  } finally {
    // Restore the option to its default so subsequent tests are unaffected.
    final boolean defaultValue =
        HivePluginOptions.HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS_VALIDATOR.getDefault().getBoolVal();
    test(String.format("alter session set \"%s\" = %s", option, defaultValue ? "true" : "false"));
  }
}
@Override
public OptionValueWrapper next() {
  final OptionValue value = mergedOptions.next();
  final Status status;
  if (value.getType() == OptionType.BOOT) {
    // BOOT options have no validator-backed default to compare against.
    status = Status.BOOT;
  } else {
    final OptionValue defaultValue = fragmentOptions.getValidator(value.getName()).getDefault();
    status = value.equalsIgnoreType(defaultValue) ? Status.DEFAULT : Status.CHANGED;
  }
  return new OptionValueWrapper(value.getName(), value.getKind(), value.getType(),
      value.getNumVal(), value.getStringVal(), value.getBoolVal(), value.getFloatVal(),
      status);
}
public AbstractRecordReader(final OperatorContext context, final List<SchemaPath> columns) { this.context = context; if (context == null) { this.numRowsPerBatch = ExecConstants.TARGET_BATCH_RECORDS_MAX.getDefault().getNumVal(); } else { this.numRowsPerBatch = context.getTargetBatchSize(); } if (context == null || context.getOptions() == null || context.getOptions().getOption(ExecConstants.OPERATOR_TARGET_BATCH_BYTES) == null) { this.numBytesPerBatch = ExecConstants.OPERATOR_TARGET_BATCH_BYTES_VALIDATOR.getDefault().getNumVal(); } else { this.numBytesPerBatch = context.getOptions().getOption(ExecConstants.OPERATOR_TARGET_BATCH_BYTES).getNumVal(); } if (columns != null) { setColumns(columns); } }
@Test // DRILL-4083 public void testNativeScanWhenNoColumnIsRead() throws Exception { try { test(String.format("alter session set \"%s\" = true", HivePluginOptions.HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS)); String query = "SELECT count(*) as col FROM hive.kv_parquet"; testPhysicalPlan(query, "mode=[NATIVE_PARQUET"); testBuilder() .sqlQuery(query) .unOrdered() .baselineColumns("col") .baselineValues(5L) .go(); } finally { test(String.format("alter session set \"%s\" = %s", HivePluginOptions.HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS, HivePluginOptions.HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS_VALIDATOR.getDefault().getBoolVal() ? "true" : "false")); } }
@Test @Ignore // TODO file JIRA to fix this public void testFix2967() throws Exception { setSessionOption(PlannerSettings.BROADCAST.getOptionName(), "false"); setSessionOption(PlannerSettings.HASHJOIN.getOptionName(), "false"); setSessionOption(ExecConstants.SLICE_TARGET, "1"); setSessionOption(ExecConstants.MAX_WIDTH_PER_NODE_KEY, "23"); final String TEST_RES_PATH = TestTools.getWorkingPath() + "/src/test/resources"; try { test("select * from dfs.\"%s/join/j1\" j1 left outer join dfs.\"%s/join/j2\" j2 on (j1.c_varchar = j2.c_varchar)", TEST_RES_PATH, TEST_RES_PATH); } finally { setSessionOption(PlannerSettings.BROADCAST.getOptionName(), String.valueOf(PlannerSettings.BROADCAST.getDefault ().getBoolVal())); setSessionOption(PlannerSettings.HASHJOIN.getOptionName(), String.valueOf(PlannerSettings.HASHJOIN.getDefault() .getBoolVal())); setSessionOption(ExecConstants.SLICE_TARGET, String.valueOf(ExecConstants.SLICE_TARGET_DEFAULT)); setSessionOption(ExecConstants.MAX_WIDTH_PER_NODE_KEY, String.valueOf(ExecConstants.MAX_WIDTH_PER_NODE .getDefault().getNumVal())); } }