/**
 * Releases this driver's query {@code Context}, transferring any locks the
 * context still holds into {@code hiveLocks} so they can be released later.
 * Best-effort: any failure is logged at debug level and swallowed.
 */
private void releaseContext() {
  try {
    if (ctx != null) {
      // Harvest the context's locks BEFORE clearing it. The previous ordering
      // called ctx.clear() first, so a throw from clear() skipped the lock
      // collection entirely and leaked the HiveLocks.
      if (ctx.getHiveLocks() != null) {
        hiveLocks.addAll(ctx.getHiveLocks());
        ctx.setHiveLocks(null);
      }
      ctx.clear();
      ctx = null;
    }
  } catch (Exception e) {
    // Cleanup failures must not fail the surrounding query lifecycle.
    LOG.debug("Exception while clearing the context ", e);
  }
}
/**
 * Clears and discards the current query {@code Context}. Any locks still held
 * by the context are moved into {@code hiveLocks}; all failures are logged at
 * debug level and otherwise ignored (best-effort cleanup).
 */
private void releaseContext() {
  try {
    if (ctx == null) {
      return; // nothing to release
    }
    ctx.clear();
    if (ctx.getHiveLocks() != null) {
      // Keep the locks around for later release by the caller.
      hiveLocks.addAll(ctx.getHiveLocks());
      ctx.setHiveLocks(null);
    }
    ctx = null;
  } catch (Exception e) {
    LOG.debug("Exception while clearing the context ", e);
  }
}
public void clear() throws IOException { // First clear the other contexts created by this query for (Context subContext : rewrittenStatementContexts) { subContext.clear(); } // Then clear this context if (resDir != null) { try { FileSystem fs = resDir.getFileSystem(conf); LOG.debug("Deleting result dir: {}", resDir); fs.delete(resDir, true); } catch (IOException e) { LOG.info("Context clear error: " + StringUtils.stringifyException(e)); } } if (resFile != null) { try { FileSystem fs = resFile.getFileSystem(conf); LOG.debug("Deleting result file: {}", resFile); fs.delete(resFile, false); } catch (IOException e) { LOG.info("Context clear error: " + StringUtils.stringifyException(e)); } } removeMaterializedCTEs(); removeScratchDir(); originalTracker = null; setNeedLockMgr(false); }
ctx.clear();
ctx.clear();
try { if (ctxCreated) { ctx.clear();
try { if (ctxCreated) { ctx.clear();
try { if (ctxCreated) { ctx.clear();
try { if (ctxCreated) { ctx.clear();
try { if (ctxCreated) { ctx.clear();
ctx.clear(); } catch (Exception e) {
try { if (ctxCreated) { ctx.clear();
try { if (ctxCreated) { ctx.clear();
ctx.clear(); } catch (Exception e) {
public List<Task<? extends Serializable>> analyzeAST(ASTNode ast) throws Exception { // Do semantic analysis and plan generation Context ctx = new Context(conf); while ((ast.getToken() == null) && (ast.getChildCount() > 0)) { ast = (ASTNode) ast.getChild(0); } sem.getOutputs().clear(); sem.getInputs().clear(); sem.analyze(ast, ctx); ctx.clear(); return sem.getRootTasks(); }
public void clear() { try { if (qlCtx != null) { qlCtx.clear(); } } catch (IOException e) { log.info("Ignoring exception in clearing qlCtx:", e); // ignoring exception in clear } } }
public void clear() { try { if (qlCtx != null) { qlCtx.clear(); } } catch (IOException e) { log.info("Ignoring exception in clearing qlCtx:", e); // ignoring exception in clear } } }
/**
 * Closes the driver's per-query resources: the plan's fetch task, the query
 * context, and the result stream.
 *
 * Individual cleanup steps are best-effort (their failures are logged at
 * debug level and swallowed), but an unexpected failure outside those inner
 * guards is reported on the console and turned into a non-zero exit code.
 *
 * @return 0 on success, 13 on an internal error during close
 */
public int close() {
  try {
    if (plan != null) {
      FetchTask fetchTask = plan.getFetchTask();
      if (null != fetchTask) {
        try {
          // Best-effort: a failing fetch-task cleanup should not abort close.
          fetchTask.clearFetch();
        } catch (Exception e) {
          LOG.debug(" Exception while clearing the Fetch task ", e);
        }
      }
    }
    if (ctx != null) {
      // NOTE(review): ctx.clear() is NOT individually guarded — if it throws,
      // control jumps to the outer catch and 13 is returned. Presumably
      // intentional; confirm before adding a local try/catch.
      ctx.clear();
    }
    if (null != resStream) {
      try {
        // resStream is held as a more general type; assumes it is always an
        // FSDataInputStream here — TODO confirm against the field's writers.
        ((FSDataInputStream) resStream).close();
      } catch (Exception e) {
        LOG.debug(" Exception while closing the resStream ", e);
      }
    }
  } catch (Exception e) {
    console.printError("FAILED: Hive Internal Error: " + Utilities.getNameMessage(e) + "\n"
        + org.apache.hadoop.util.StringUtils.stringifyException(e));
    // 13 signals an internal error during close to the caller.
    return 13;
  }
  return 0;
}
public static ASTNode parseHQL(String query, HiveConf conf) throws LensException { ParseDriver driver = new ParseDriver(); ASTNode tree = null; Context ctx = null; try { ctx = new Context(conf); tree = driver.parse(query, ctx); tree = ParseUtils.findRootNonNullToken(tree); } catch (ParseException e) { throw new LensException(SYNTAX_ERROR.getLensErrorInfo(), e, e.getMessage()); } catch (IOException e) { throw new RuntimeException(e); } finally { if (ctx != null) { try { ctx.clear(); } catch (IOException e) { // ignoring exception in clear } } } return tree; }
/**
 * Parses an HQL query string into an AST and normalizes the result to the
 * first non-null-token root node.
 *
 * @param query the HQL text to parse
 * @param conf  configuration used to construct the temporary parse context
 * @return the root non-null-token node of the parse tree
 * @throws LensException  wrapping any {@link ParseException} (syntax error)
 * @throws RuntimeException wrapping any IOException from context creation
 */
public static ASTNode parseHQL(String query, HiveConf conf) throws LensException {
  ParseDriver driver = new ParseDriver();
  ASTNode tree = null;
  Context ctx = null;
  try {
    ctx = new Context(conf);
    tree = driver.parse(query, ctx);
    // Skip nil wrapper nodes so callers get a token-bearing root.
    tree = ParseUtils.findRootNonNullToken(tree);
  } catch (ParseException e) {
    // Surface syntax errors as LensException with the parser's message.
    throw new LensException(SYNTAX_ERROR.getLensErrorInfo(), e, e.getMessage());
  } catch (IOException e) {
    throw new RuntimeException(e);
  } finally {
    // Best-effort cleanup of the temporary context's scratch state.
    if (ctx != null) {
      try {
        ctx.clear();
      } catch (IOException e) {
        // ignoring exception in clear
      }
    }
  }
  return tree;
}