public void clear() throws IOException {
  if (resDir != null) {
    try {
      FileSystem fs = resDir.getFileSystem(conf);
      LOG.debug("Deleting result dir: {}", resDir);
      fs.delete(resDir, true);
    } catch (IOException e) {
      LOG.info("Context clear error: " + StringUtils.stringifyException(e));
    }
  }

  if (resFile != null) {
    try {
      FileSystem fs = resFile.getFileSystem(conf);
      LOG.debug("Deleting result file: {}", resFile);
      fs.delete(resFile, false);
    } catch (IOException e) {
      LOG.info("Context clear error: " + StringUtils.stringifyException(e));
    }
  }
  removeMaterializedCTEs();
  removeScratchDir();
  originalTracker = null;
  setNeedLockMgr(false);
}
public void clear() throws IOException {
  // First clear the other contexts created by this query
  for (Context subContext : rewrittenStatementContexts) {
    subContext.clear();
  }

  // Then clear this context
  if (resDir != null) {
    try {
      FileSystem fs = resDir.getFileSystem(conf);
      LOG.debug("Deleting result dir: {}", resDir);
      fs.delete(resDir, true);
    } catch (IOException e) {
      LOG.info("Context clear error: " + StringUtils.stringifyException(e));
    }
  }

  if (resFile != null) {
    try {
      FileSystem fs = resFile.getFileSystem(conf);
      LOG.debug("Deleting result file: {}", resFile);
      fs.delete(resFile, false);
    } catch (IOException e) {
      LOG.info("Context clear error: " + StringUtils.stringifyException(e));
    }
  }
  removeMaterializedCTEs();
  removeScratchDir();
  originalTracker = null;
  setNeedLockMgr(false);
}
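// A minimal, self-contained sketch of how a sub-context could be registered so
// the cascading clear() above has entries in rewrittenStatementContexts to
// iterate over. MiniContext and its registration hook are illustrative
// assumptions for this sketch, not Hive's actual Context API.
class MiniContext {
  private final java.util.List<MiniContext> rewrittenStatementContexts =
      new java.util.ArrayList<>();

  // Assumed registration hook: called when a statement is rewritten into
  // sub-statements that each get their own context.
  void addRewrittenStatementContext(MiniContext subContext) {
    rewrittenStatementContexts.add(subContext);
  }

  void clear() throws java.io.IOException {
    // Clear children first, mirroring the ordering in the method above ...
    for (MiniContext subContext : rewrittenStatementContexts) {
      subContext.clear();
    }
    // ... then this context's own state (result dir/file, scratch dirs).
  }
}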
/**
 * Add the task according to the parsed command tree. This is used for the CLI
 * command "UNLOCK TABLE ..;".
 *
 * @param ast
 *          The parsed command tree.
 * @throws SemanticException
 *           Parsing failed
 */
private void analyzeUnlockTable(ASTNode ast) throws SemanticException {
  String tableName = getUnescapedName((ASTNode) ast.getChild(0));
  List<Map<String, String>> partSpecs = getPartitionSpecs(getTable(tableName), ast);

  // We can only have a single partition spec
  assert (partSpecs.size() <= 1);
  Map<String, String> partSpec = null;
  if (partSpecs.size() > 0) {
    partSpec = partSpecs.get(0);
  }

  UnlockTableDesc unlockTblDesc = new UnlockTableDesc(tableName, partSpec);

  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), unlockTblDesc)));

  // Need to initialize the lock manager
  ctx.setNeedLockMgr(true);
}
private void analyzeUnlockDatabase(ASTNode ast) throws SemanticException {
  String dbName = unescapeIdentifier(ast.getChild(0).getText());

  inputs.add(new ReadEntity(getDatabase(dbName)));
  // The unlock database operation releases the lock explicitly; the
  // operation itself doesn't need to be locked. Set the WriteEntity's
  // WriteType to DDL_NO_LOCK here, otherwise it will conflict with
  // Hive's transaction handling.
  outputs.add(new WriteEntity(getDatabase(dbName), WriteType.DDL_NO_LOCK));

  UnlockDatabaseDesc unlockDatabaseDesc = new UnlockDatabaseDesc(dbName);
  DDLWork work = new DDLWork(getInputs(), getOutputs(), unlockDatabaseDesc);
  rootTasks.add(TaskFactory.get(work));

  // Need to initialize the lock manager
  ctx.setNeedLockMgr(true);
}
/**
 * Add the task according to the parsed command tree. This is used for the CLI
 * command "UNLOCK TABLE ..;".
 *
 * @param ast
 *          The parsed command tree.
 * @throws SemanticException
 *           Parsing failed
 */
private void analyzeUnlockTable(ASTNode ast) throws SemanticException {
  String tableName = getUnescapedName((ASTNode) ast.getChild(0));
  List<Map<String, String>> partSpecs = getPartitionSpecs(getTable(tableName), ast);

  // We can only have a single partition spec
  assert (partSpecs.size() <= 1);
  Map<String, String> partSpec = null;
  if (partSpecs.size() > 0) {
    partSpec = partSpecs.get(0);
  }

  UnlockTableDesc unlockTblDesc = new UnlockTableDesc(tableName, partSpec);

  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), unlockTblDesc), conf));

  // Need to initialize the lock manager
  ctx.setNeedLockMgr(true);
}
private void analyzeUnlockDatabase(ASTNode ast) throws SemanticException {
  String dbName = unescapeIdentifier(ast.getChild(0).getText());

  inputs.add(new ReadEntity(getDatabase(dbName)));
  // The unlock database operation releases the lock explicitly; the
  // operation itself doesn't need to be locked. Set the WriteEntity's
  // WriteType to DDL_NO_LOCK here, otherwise it will conflict with
  // Hive's transaction handling.
  outputs.add(new WriteEntity(getDatabase(dbName), WriteType.DDL_NO_LOCK));

  UnlockDatabaseDesc unlockDatabaseDesc = new UnlockDatabaseDesc(dbName);
  DDLWork work = new DDLWork(getInputs(), getOutputs(), unlockDatabaseDesc);
  rootTasks.add(TaskFactory.get(work, conf));

  // Need to initialize the lock manager
  ctx.setNeedLockMgr(true);
}
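// The paired variants above differ only in which TaskFactory.get overload they
// call: one is handed the configuration explicitly, the other resolves it
// internally. A hedged, self-contained stand-in for that overload pattern;
// MiniTaskFactory and MiniTask are illustrative, not Hive API.
final class MiniTaskFactory {
  static final class MiniTask {
    final Object work;
    final Object conf;
    MiniTask(Object work, Object conf) {
      this.work = work;
      this.conf = conf;
    }
  }

  // Overload without conf: the factory supplies a default configuration.
  static MiniTask get(Object work) {
    return get(work, new Object() /* stand-in for a session-scoped conf */);
  }

  // Overload with conf: the caller controls the configuration explicitly.
  static MiniTask get(Object work, Object conf) {
    return new MiniTask(work, conf);
  }

  private MiniTaskFactory() {
  }
}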
/**
 * Add the task according to the parsed command tree. This is used for the CLI
 * command "LOCK TABLE ..;".
 *
 * @param ast
 *          The parsed command tree.
 * @throws SemanticException
 *           Parsing failed
 */
private void analyzeLockTable(ASTNode ast) throws SemanticException {
  String tableName = getUnescapedName((ASTNode) ast.getChild(0)).toLowerCase();
  String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase());
  List<Map<String, String>> partSpecs = getPartitionSpecs(getTable(tableName), ast);

  // We can only have a single partition spec
  assert (partSpecs.size() <= 1);
  Map<String, String> partSpec = null;
  if (partSpecs.size() > 0) {
    partSpec = partSpecs.get(0);
  }

  LockTableDesc lockTblDesc = new LockTableDesc(tableName, mode, partSpec,
      HiveConf.getVar(conf, ConfVars.HIVEQUERYID));
  lockTblDesc.setQueryStr(this.ctx.getCmd());

  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), lockTblDesc)));

  // Need to initialize the lock manager
  ctx.setNeedLockMgr(true);
}
private void analyzeLockDatabase(ASTNode ast) throws SemanticException {
  String dbName = unescapeIdentifier(ast.getChild(0).getText());
  String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase());

  inputs.add(new ReadEntity(getDatabase(dbName)));
  // The lock database operation acquires the lock explicitly; the operation
  // itself doesn't need to be locked. Set the WriteEntity's WriteType to
  // DDL_NO_LOCK here, otherwise it will conflict with Hive's transaction
  // handling.
  outputs.add(new WriteEntity(getDatabase(dbName), WriteType.DDL_NO_LOCK));

  LockDatabaseDesc lockDatabaseDesc = new LockDatabaseDesc(dbName, mode,
      HiveConf.getVar(conf, ConfVars.HIVEQUERYID));
  lockDatabaseDesc.setQueryStr(ctx.getCmd());
  DDLWork work = new DDLWork(getInputs(), getOutputs(), lockDatabaseDesc);
  rootTasks.add(TaskFactory.get(work));

  ctx.setNeedLockMgr(true);
}
/**
 * Add the task according to the parsed command tree. This is used for the CLI
 * command "LOCK TABLE ..;".
 *
 * @param ast
 *          The parsed command tree.
 * @throws SemanticException
 *           Parsing failed
 */
private void analyzeLockTable(ASTNode ast) throws SemanticException {
  String tableName = getUnescapedName((ASTNode) ast.getChild(0)).toLowerCase();
  String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase());
  List<Map<String, String>> partSpecs = getPartitionSpecs(getTable(tableName), ast);

  // We can only have a single partition spec
  assert (partSpecs.size() <= 1);
  Map<String, String> partSpec = null;
  if (partSpecs.size() > 0) {
    partSpec = partSpecs.get(0);
  }

  LockTableDesc lockTblDesc = new LockTableDesc(tableName, mode, partSpec,
      HiveConf.getVar(conf, ConfVars.HIVEQUERYID));
  lockTblDesc.setQueryStr(this.ctx.getCmd());

  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), lockTblDesc), conf));

  // Need to initialize the lock manager
  ctx.setNeedLockMgr(true);
}
private void analyzeLockDatabase(ASTNode ast) throws SemanticException {
  String dbName = unescapeIdentifier(ast.getChild(0).getText());
  String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase());

  inputs.add(new ReadEntity(getDatabase(dbName)));
  // The lock database operation acquires the lock explicitly; the operation
  // itself doesn't need to be locked. Set the WriteEntity's WriteType to
  // DDL_NO_LOCK here, otherwise it will conflict with Hive's transaction
  // handling.
  outputs.add(new WriteEntity(getDatabase(dbName), WriteType.DDL_NO_LOCK));

  LockDatabaseDesc lockDatabaseDesc = new LockDatabaseDesc(dbName, mode,
      HiveConf.getVar(conf, ConfVars.HIVEQUERYID));
  lockDatabaseDesc.setQueryStr(ctx.getCmd());
  DDLWork work = new DDLWork(getInputs(), getOutputs(), lockDatabaseDesc);
  rootTasks.add(TaskFactory.get(work, conf));

  ctx.setNeedLockMgr(true);
}
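// Toy model of the DDL_NO_LOCK idea used in analyzeLockDatabase and
// analyzeUnlockDatabase above: outputs tagged DDL_NO_LOCK stay visible to
// authorization and lineage, but are skipped during lock acquisition, so the
// LOCK/UNLOCK DATABASE statement does not contend with its own target
// database. All names here are illustrative stand-ins, not Hive's lock
// manager.
class MiniLockManager {
  enum MiniWriteType { DDL_NO_LOCK, DDL_EXCLUSIVE }

  static final class MiniWriteEntity {
    final String name;
    final MiniWriteType type;
    MiniWriteEntity(String name, MiniWriteType type) {
      this.name = name;
      this.type = type;
    }
  }

  java.util.List<String> acquire(java.util.List<MiniWriteEntity> outputs) {
    java.util.List<String> locked = new java.util.ArrayList<>();
    for (MiniWriteEntity e : outputs) {
      if (e.type != MiniWriteType.DDL_NO_LOCK) { // NO_LOCK entries are skipped
        locked.add(e.name);
      }
    }
    return locked;
  }
}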
/**
 * Add the task according to the parsed command tree. This is used for the CLI
 * command "SHOW LOCKS DATABASE database [extended];".
 *
 * @param ast
 *          The parsed command tree.
 * @throws SemanticException
 *           Parsing failed
 */
private void analyzeShowDbLocks(ASTNode ast) throws SemanticException {
  boolean isExtended = (ast.getChildCount() > 1);
  String dbName = stripQuotes(ast.getChild(0).getText());

  HiveTxnManager txnManager = null;
  try {
    txnManager = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf);
  } catch (LockException e) {
    throw new SemanticException(e.getMessage());
  }

  ShowLocksDesc showLocksDesc = new ShowLocksDesc(ctx.getResFile(), dbName,
      isExtended, txnManager.useNewShowLocksFormat());
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showLocksDesc)));
  setFetchTask(createFetchTask(showLocksDesc.getSchema()));

  // Need to initialize the lock manager
  ctx.setNeedLockMgr(true);
}
/**
 * Add the task according to the parsed command tree. This is used for the CLI
 * command "SHOW LOCKS DATABASE database [extended];".
 *
 * @param ast
 *          The parsed command tree.
 * @throws SemanticException
 *           Parsing failed
 */
private void analyzeShowDbLocks(ASTNode ast) throws SemanticException {
  boolean isExtended = (ast.getChildCount() > 1);
  String dbName = stripQuotes(ast.getChild(0).getText());

  HiveTxnManager txnManager = null;
  try {
    txnManager = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf);
  } catch (LockException e) {
    throw new SemanticException(e.getMessage());
  }

  ShowLocksDesc showLocksDesc = new ShowLocksDesc(ctx.getResFile(), dbName,
      isExtended, txnManager.useNewShowLocksFormat());
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showLocksDesc), conf));
  setFetchTask(createFetchTask(showLocksDesc.getSchema()));

  // Need to initialize the lock manager
  ctx.setNeedLockMgr(true);
}
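// Toy illustration of the result-file handoff behind analyzeShowDbLocks: the
// DDL task writes its rows to ctx.getResFile(), and the fetch task created
// from showLocksDesc.getSchema() reads them back for the client. MiniFetch
// and its methods are illustrative stand-ins for Hive's FetchTask machinery.
class MiniFetch {
  static void writeResults(java.nio.file.Path resFile, java.util.List<String> rows)
      throws java.io.IOException {
    java.nio.file.Files.write(resFile, rows);
  }

  // 'schema' would describe the column layout, which the client uses to parse
  // each row; here we simply return the raw lines.
  static java.util.List<String> fetch(java.nio.file.Path resFile, String schema)
      throws java.io.IOException {
    return java.nio.file.Files.readAllLines(resFile);
  }
}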
ctx.setNeedLockMgr(true);
public void clear() throws IOException {
  if (resDir != null) {
    try {
      FileSystem fs = resDir.getFileSystem(conf);
      fs.delete(resDir, true);
    } catch (IOException e) {
      LOG.info("Context clear error: " + StringUtils.stringifyException(e));
    }
  }

  if (resFile != null) {
    try {
      FileSystem fs = resFile.getFileSystem(conf);
      fs.delete(resFile, false);
    } catch (IOException e) {
      LOG.info("Context clear error: " + StringUtils.stringifyException(e));
    }
  }
  removeScratchDir();
  originalTracker = null;
  setNeedLockMgr(false);
}
/**
 * Add the task according to the parsed command tree. This is used for the CLI
 * command "UNLOCK TABLE ..;".
 *
 * @param ast
 *          The parsed command tree.
 * @throws SemanticException
 *           Parsing failed
 */
private void analyzeUnlockTable(ASTNode ast) throws SemanticException {
  String tableName = getUnescapedName((ASTNode) ast.getChild(0));
  List<Map<String, String>> partSpecs = getPartitionSpecs(ast);

  // We can only have a single partition spec
  assert (partSpecs.size() <= 1);
  Map<String, String> partSpec = null;
  if (partSpecs.size() > 0) {
    partSpec = partSpecs.get(0);
  }

  UnlockTableDesc unlockTblDesc = new UnlockTableDesc(tableName, partSpec);

  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), unlockTblDesc), conf));

  // Need to initialize the lock manager
  ctx.setNeedLockMgr(true);
}
private void analyzeUnlockDatabase(ASTNode ast) throws SemanticException {
  String dbName = unescapeIdentifier(ast.getChild(0).getText());

  UnlockDatabaseDesc unlockDatabaseDesc = new UnlockDatabaseDesc(dbName);
  DDLWork work = new DDLWork(getInputs(), getOutputs(), unlockDatabaseDesc);
  rootTasks.add(TaskFactory.get(work, conf));

  // Need to initialize the lock manager
  ctx.setNeedLockMgr(true);
}
private void analyzeLockDatabase(ASTNode ast) throws SemanticException {
  String dbName = unescapeIdentifier(ast.getChild(0).getText());
  String mode = unescapeIdentifier(ast.getChild(1).getText().toUpperCase());

  //inputs.add(new ReadEntity(dbName));
  //outputs.add(new WriteEntity(dbName));

  LockDatabaseDesc lockDatabaseDesc = new LockDatabaseDesc(dbName, mode,
      HiveConf.getVar(conf, ConfVars.HIVEQUERYID));
  lockDatabaseDesc.setQueryStr(ctx.getCmd());
  DDLWork work = new DDLWork(getInputs(), getOutputs(), lockDatabaseDesc);
  rootTasks.add(TaskFactory.get(work, conf));

  ctx.setNeedLockMgr(true);
}
/**
 * Add the task according to the parsed command tree. This is used for the CLI
 * command "SHOW LOCKS DATABASE database [extended];".
 *
 * @param ast
 *          The parsed command tree.
 * @throws SemanticException
 *           Parsing failed
 */
private void analyzeShowDbLocks(ASTNode ast) throws SemanticException {
  boolean isExtended = (ast.getChildCount() > 1);
  String dbName = stripQuotes(ast.getChild(0).getText());

  HiveTxnManager txnManager = null;
  try {
    txnManager = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf);
  } catch (LockException e) {
    throw new SemanticException(e.getMessage());
  }

  ShowLocksDesc showLocksDesc = new ShowLocksDesc(ctx.getResFile(), dbName,
      isExtended, txnManager.useNewShowLocksFormat());
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), showLocksDesc), conf));
  setFetchTask(createFetchTask(showLocksDesc.getSchema()));

  // Need to initialize the lock manager
  ctx.setNeedLockMgr(true);
}