public WindowingSpec next(HiveConf hCfg, SemanticAnalyzer semAly, UnparseTranslator unparseT,
    RowResolver inputRR) throws SemanticException {
  SemanticException originalException = null;
  Iterator<Map.Entry<PartitioningSpec, WindowingSpec>> grpIt = groups.entrySet().iterator();
  while (grpIt.hasNext()) {
    Map.Entry<PartitioningSpec, WindowingSpec> entry = grpIt.next();
    WindowingSpec wSpec = entry.getValue();
    try {
      PTFTranslator t = new PTFTranslator();
      t.translate(wSpec, semAly, hCfg, inputRR, unparseT);
      // Remove via the iterator instead of groups.remove(entry.getKey()): mutating the map
      // directly mid-iteration was only safe because the method returns immediately after.
      grpIt.remove();
      return wSpec;
    } catch (SemanticException se) {
      originalException = se;
    }
  }
  // Guard against an empty groups map, where originalException is still null and
  // originalException.getMessage() would throw a NullPointerException.
  throw new SemanticException("Failed to breakup Windowing invocations into Groups. "
      + "At least 1 group must only depend on input columns. "
      + "Also check for circular dependencies.\n"
      + "Underlying error: "
      + (originalException == null ? "none" : originalException.getMessage()));
}
@Override
public String convertExprToFilter(byte[] exprBytes, String defaultPartitionName)
    throws MetaException {
  ExprNodeGenericFuncDesc expr = deserializeExpr(exprBytes);
  if ((defaultPartitionName != null) && (!defaultPartitionName.isEmpty())) {
    try {
      ExprNodeDescUtils.replaceNullFiltersWithDefaultPartition(expr, defaultPartitionName);
    } catch (SemanticException ex) {
      LOG.error("Failed to replace \"is null\" and \"is not null\" expression with default partition", ex);
      throw new MetaException(ex.getMessage());
    }
  }
  return expr.getExprString();
}
@Override
public boolean filterPartitionsByExpr(List<FieldSchema> partColumns, byte[] exprBytes,
    String defaultPartitionName, List<String> partitionNames) throws MetaException {
  List<String> partColumnNames = new ArrayList<>();
  List<PrimitiveTypeInfo> partColumnTypeInfos = new ArrayList<>();
  for (FieldSchema fs : partColumns) {
    partColumnNames.add(fs.getName());
    partColumnTypeInfos.add(TypeInfoFactory.getPrimitiveTypeInfo(fs.getType()));
  }
  ExprNodeGenericFuncDesc expr = deserializeExpr(exprBytes);
  try {
    ExprNodeDescUtils.replaceEqualDefaultPartition(expr, defaultPartitionName);
  } catch (SemanticException ex) {
    LOG.error("Failed to replace default partition", ex);
    throw new MetaException(ex.getMessage());
  }
  try {
    long startTime = System.nanoTime(), len = partitionNames.size();
    boolean result = PartitionPruner.prunePartitionNames(
        partColumnNames, partColumnTypeInfos, expr, defaultPartitionName, partitionNames);
    double timeMs = (System.nanoTime() - startTime) / 1000000.0;
    LOG.debug("Pruning " + len + " partition names took " + timeMs + "ms");
    return result;
  } catch (HiveException ex) {
    LOG.error("Failed to apply the expression", ex);
    throw new MetaException(ex.getMessage());
  }
}
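// A recurring pattern in the snippets above is "throw new MetaException(ex.getMessage())",
// which discards the original stack trace. MetaException is Thrift-generated and has no
// (String, Throwable) constructor, but the cause can still be attached via Throwable.initCause.
// A minimal hedged sketch; the helper name "wrapAsMetaException" is hypothetical and not part
// of the Hive code above:
private static MetaException wrapAsMetaException(String message, Exception cause) {
  MetaException me = new MetaException(message);
  me.initCause(cause); // preserve the original stack trace for callers that log the MetaException
  return me;
}
// Usage at a call site shaped like the catch blocks above:
//   } catch (SemanticException ex) {
//     throw wrapAsMetaException(ex.getMessage(), ex);
//   }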
@Test
public void testBogusLevel() throws Exception {
  boolean sawException = false;
  try {
    parseAndAnalyze("alter table foo partition(ds = 'today') compact 'bogus'");
  } catch (SemanticException e) {
    sawException = true;
    Assert.assertEquals(ErrorMsg.INVALID_COMPACTION_TYPE.getMsg(), e.getMessage());
  }
  Assert.assertTrue(sawException);
}
"tree for input command - " + command + " " , e); LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e)); throw new SemanticException(e.getMessage(), e);
throw new CalciteSubquerySemanticException(e.getMessage());
  throw new MetaException(e.getMessage());
} catch (SemanticException e) {
  throw new MetaException(e.getMessage());
LOG.error("Exception in walking operator tree. Rewrite variables not populated"); LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e)); throw new SemanticException(e.getMessage(), e);
|| (se.getMessage().contains(ErrorMsg.INVALID_TABLE.getMsg())) )){
+ ex.getMessage());
@Test
public void testPatternWithoutWildCardChar() {
  String patternStr =
      ReduceSinkOperator.getOperatorName() + "%"
          + SelectOperator.getOperatorName() + "%"
          + FileSinkOperator.getOperatorName() + "%";
  RuleRegExp rule1 = new RuleRegExp("R1", patternStr);
  assertEquals(rule1.rulePatternIsValidWithoutWildCardChar(), true);
  assertEquals(rule1.rulePatternIsValidWithWildCardChar(), false);
  // positive test
  Stack<Node> ns1 = new Stack<Node>();
  ns1.push(new TestNode(ReduceSinkOperator.getOperatorName()));
  ns1.push(new TestNode(SelectOperator.getOperatorName()));
  ns1.push(new TestNode(FileSinkOperator.getOperatorName()));
  try {
    assertEquals(rule1.cost(ns1), patternStr.length());
  } catch (SemanticException e) {
    fail(e.getMessage());
  }
  // negative test: the original pushed the last two nodes onto ns1 instead of ns2,
  // leaving ns2 with a single node; push onto ns2 so the stack actually exercises the mismatch
  Stack<Node> ns2 = new Stack<Node>();
  ns2.push(new TestNode(ReduceSinkOperator.getOperatorName()));
  ns2.push(new TestNode(TableScanOperator.getOperatorName()));
  ns2.push(new TestNode(FileSinkOperator.getOperatorName()));
  try {
    assertEquals(rule1.cost(ns2), -1);
  } catch (SemanticException e) {
    fail(e.getMessage());
  }
}
  assertNotEquals(rule1.cost(ns2), -1);
} catch (SemanticException e) {
  fail(e.getMessage());
  assertEquals(rule1.cost(ns3), -1);
} catch (SemanticException e) {
  fail(e.getMessage());
  viewSelect = handleCreateViewDDL(newAST);
} catch (SemanticException e) {
  throw new CalciteViewSemanticException(e.getMessage());
  handleCreateViewDDL(newAST);
} catch (SemanticException e) {
  throw new CalciteViewSemanticException(e.getMessage());
private int checkLockManager() {
  boolean supportConcurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
  if (supportConcurrency && (hiveLockMgr == null)) {
    try {
      setLockManager();
    } catch (SemanticException e) {
      errorMessage = "FAILED: Error in semantic analysis: " + e.getMessage();
      SQLState = ErrorMsg.findSQLState(e.getMessage());
      console.printError(errorMessage,
          "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
      return (12);
    }
  }
  return (0);
}
/**
 * Invoke Hive database filtering to remove the entries the user has no
 * privileges to access.
 *
 * @param dbList the full list of database names
 * @return the filtered list of database names
 * @throws MetaException if authorization filtering fails
 */
private List<String> filterDatabases(List<String> dbList) throws MetaException {
  try {
    return HiveAuthzBindingHook.filterShowDatabases(getHiveAuthzBinding(), dbList,
        HiveOperation.SHOWDATABASES, getUserName());
  } catch (SemanticException e) {
    throw new MetaException("Error getting DB list " + e.getMessage());
  }
}
public CubeQueryContext rewrite(ASTNode astnode) throws LensException {
  CubeSemanticAnalyzer analyzer;
  try {
    analyzer = new CubeSemanticAnalyzer(conf, hconf);
    analyzer.analyze(astnode, qlCtx);
  } catch (SemanticException e) {
    throw new LensException(SYNTAX_ERROR.getLensErrorInfo(), e, e.getMessage());
  }
  CubeQueryContext ctx = new CubeQueryContext(astnode, analyzer.getCubeQB(), conf, hconf);
  rewrite(rewriters, ctx);
  return ctx;
}