/**
 * Returns the JDBC metadata of the underlying catalog connection.
 *
 * @return the {@link DatabaseMetaData} of {@code conn}
 * @throws TajoInternalError wrapping any {@link SQLException} from the driver
 */
public DatabaseMetaData getDatabaseMetaData() {
  try {
    DatabaseMetaData metaData = conn.getMetaData();
    return metaData;
  } catch (SQLException e) {
    throw new TajoInternalError(e);
  }
}
}
/**
 * Asserts that an internal invariant holds.
 *
 * @param condition the condition expected to be {@code true}
 * @param message   a {@link String#format(String, Object...)} pattern for the failure message
 * @param arguments values substituted into the message pattern
 * @throws TajoInternalError if {@code condition} is {@code false}
 */
public static void assertCondition(boolean condition, String message, Object... arguments) {
  if (condition) {
    return;
  }
  throw new TajoInternalError(String.format(message, arguments));
}
}
/**
 * Parses a string literal as a signed decimal {@code int}.
 *
 * @param literal the text to parse
 * @return the parsed integer value
 * @throws TajoInternalError wrapping the {@link NumberFormatException} if the literal is not a valid int
 */
public static int parseValue(String literal) {
  try {
    int value = Integer.parseInt(literal);
    return value;
  } catch (NumberFormatException e) {
    throw new TajoInternalError(e);
  }
}
/**
 * Opens a JDBC connection to the fragment's URI, runs the generated SQL, and returns an
 * iterator over the result set.
 *
 * On success the connection, statement, and result set are intentionally left open: the
 * returned {@link ResultSetIterator} consumes the {@link ResultSet} lazily and is expected
 * to close them. On failure they are closed here so a JDBC connection is not leaked
 * (the original code rethrew without closing anything already opened).
 *
 * @return an iterator over the query result
 * @throws TajoInternalError wrapping any {@link SQLException}
 */
private ResultSetIterator executeQueryAndGetIter() {
  LOG.info("Generated SQL: " + generatedSql);
  Connection conn = null;
  Statement statement = null;
  try {
    conn = DriverManager.getConnection(fragment.uri, connProperties);
    statement = conn.createStatement();
    ResultSet resultset = statement.executeQuery(generatedSql);
    return new ResultSetIterator(resultset);
  } catch (SQLException e) {
    // Best-effort cleanup of whatever was opened before the failure; without this,
    // an error in createStatement()/executeQuery() leaks the connection.
    if (statement != null) {
      try {
        statement.close();
      } catch (SQLException ignored) {
        // already failing; suppress secondary close errors
      }
    }
    if (conn != null) {
      try {
        conn.close();
      } catch (SQLException ignored) {
        // already failing; suppress secondary close errors
      }
    }
    throw new TajoInternalError(e);
  }
}
/**
 * Instantiates the legacy function implementation via its public no-arg constructor.
 *
 * @return a new {@link Function} instance
 * @throws TajoInternalError if the class cannot be instantiated; the underlying reflection
 *         failure is attached as the cause (the original code dropped it, losing the stack trace)
 */
public Function newInstance() {
  try {
    Constructor<? extends Function> cons = getLegacyFuncClass().getConstructor();
    return cons.newInstance();
  } catch (Exception e) {
    TajoInternalError error = new TajoInternalError("Cannot initiate function " + signature);
    // NOTE(review): assumes the String constructor of TajoInternalError leaves the cause
    // uninitialized so initCause is legal — confirm against TajoRuntimeException.
    try {
      error.initCause(e);
    } catch (IllegalStateException ignored) {
      // cause was already initialized by the constructor; nothing more to attach
    }
    throw error;
  }
}
/**
 * Reads a schema file bundled on the classpath under the {@code schemas/} resource directory.
 *
 * @param path the resource name relative to {@code schemas/}
 * @return the file contents as text
 * @throws TajoInternalError wrapping any {@link IOException} while reading the resource
 */
public String readSchemaFile(String path) {
  final String resourcePath = "schemas/" + path;
  try {
    return JavaResourceUtil.readTextFromResource(resourcePath);
  } catch (IOException e) {
    throw new TajoInternalError(e);
  }
}
/**
 * Returns the node located by the preceding search.
 *
 * @return the found {@link LogicalNode}
 * @throws TajoInternalError if no parent of the target was found
 */
public LogicalNode getFound() {
  if (found != null) {
    return found;
  }
  throw new TajoInternalError("cannot find the parent of " + target.getPID());
}
/**
 * Resolves the Tajo root directory configured via {@code ConfVars.ROOT_DIR} into a
 * fully-qualified filesystem path.
 *
 * @param conf the Tajo configuration; {@code ROOT_DIR} must be set
 * @return the qualified root path
 * @throws TajoInternalError wrapping any {@link IOException} while obtaining the filesystem
 */
public static Path getTajoRootDir(TajoConf conf) {
  String rootPath = conf.getVar(ConfVars.ROOT_DIR);
  Preconditions.checkNotNull(rootPath,
      ConfVars.ROOT_DIR.varname + " must be set before a Tajo Cluster starts up");
  try {
    FileSystem fs = FileSystem.get(conf);
    return fs.makeQualified(new Path(rootPath));
  } catch (IOException e) {
    throw new TajoInternalError(e);
  }
}
/**
 * Creates a Hive-backed catalog store from a Tajo configuration.
 *
 * @param conf the configuration; must be a {@code TajoConf} instance
 * @throws TajoInternalError if {@code conf} is not a {@code TajoConf}
 */
public HiveCatalogStore(final Configuration conf) {
  if (!(conf instanceof TajoConf)) {
    throw new TajoInternalError("Invalid Configuration Type:" + conf.getClass().getSimpleName());
  }
  TajoConf tajoConf = (TajoConf) conf;
  this.conf = conf;
  // default tablespace lives under the warehouse directory
  this.defaultTableSpaceUri = TajoConf.getWarehouseDir(tajoConf).toString();
  this.clientPool = new HiveCatalogStoreClientPool(CLIENT_POOL_SIZE, conf);
  this.catalogUri = conf.get(CATALOG_URI);
}
/**
 * Reports whether the tracked query has been killed.
 *
 * @return {@code true} iff the query's state is {@code QUERY_KILLED}
 * @throws TajoInternalError wrapping a {@link QueryNotFoundException} if the query is unknown
 */
@Override
public boolean isKilled() {
  try {
    TajoProtos.QueryState state = queryClient.getQueryStatus(queryId).getState();
    return state == TajoProtos.QueryState.QUERY_KILLED;
  } catch (QueryNotFoundException e) {
    throw new TajoInternalError(e);
  }
}
public void updateCurrentNode(Expr expr) { if (expr.getType() != OpType.RelationList) { // skip relation list because it is a virtual expr. this.currentNode = exprToNodeMap.get(ObjectUtils.identityToString(expr)); if (currentNode == null) { throw new TajoInternalError("Unregistered Algebra Expression: " + expr.getType()); } } }
OffHeapRowBlockWriter(RowBlock rowBlock) { super(rowBlock.getDataTypes()); this.rowBlock = rowBlock; if (!rowBlock.getMemory().hasAddress()) { throw new TajoInternalError(rowBlock.getMemory().getClass().getSimpleName() + " does not support to direct memory access"); } }
/**
 * Returns a live catalog connection, transparently replacing the cached one when it
 * fails a validity probe (100 ms timeout).
 *
 * @return a valid {@link Connection}
 * @throws TajoInternalError wrapping any {@link SQLException} from validation or reconnection
 */
public Connection getConnection() {
  try {
    if (!isConnValid(100)) {
      // stale connection: discard quietly and open a fresh one
      CatalogUtil.closeQuietly(conn);
      conn = createConnection(conf);
    }
  } catch (SQLException e) {
    throw new TajoInternalError(e);
  }
  return conn;
}
/**
 * Fills this buffer from the given channel according to the configured data format.
 * Only the DRAW format is currently supported.
 *
 * @param channel the source channel
 * @return whether the fill made progress, as reported by the format-specific filler
 * @throws IOException        on channel I/O failure
 * @throws TajoInternalError  for any data format other than DRAW (not implemented yet)
 */
@Override
public boolean copyFromChannel(ScatteringByteChannel channel) throws IOException {
  if (dataFormat.equals(BuiltinStorages.DRAW)) {
    return fillDrawBuffer(channel);
  }
  throw new TajoInternalError(new NotImplementedException("Heap memory writer not implemented yet"));
}
/**
 * Verifies that at most one query of the test query's type exists in the given list.
 *
 * @param queries   the queries to scan
 * @param testQuery the query whose type is checked for duplication
 * @throws TajoInternalError if the type occurs more than once
 */
protected void validateSQLObject(List<SQLObject> queries, SQLObject testQuery) {
  int matches = 0;
  for (SQLObject query : queries) {
    if (query.getType() != testQuery.getType()) {
      continue;
    }
    matches++;
    if (matches > 1) {
      // second occurrence proves the duplicate; no need to scan further
      throw new TajoInternalError("Duplicate Query type (" + testQuery.getType() + ") has found.");
    }
  }
}
public CompactRowBlockWriter(RowBlock rowBlock) { this.dataTypes = rowBlock.getDataTypes(); this.rowBlock = rowBlock; // compute the number of bytes, representing the null flags nullFlags = new BitArray(dataTypes.length); headerSize = RECORD_FIELD_SIZE + SizeOf.SIZE_OF_SHORT + nullFlags.bytesLength(); if (!rowBlock.getMemory().hasAddress()) { throw new TajoInternalError(rowBlock.getMemory().getClass().getSimpleName() + " does not support to direct memory access"); } }
/**
 * Verifies an expression against the given node: runs the expression visitor, then checks
 * that every column the expression refers to is visible in the node's input schema.
 *
 * @param state       the verification state, accumulated by the visitor
 * @param currentNode the node whose input schema must provide all referenced columns
 * @param expression  the expression to verify
 * @return the (possibly updated) verification state
 * @throws TajoInternalError if a referenced column is not accessible at the node
 */
public static VerificationState verify(VerificationState state, LogicalNode currentNode,
    EvalNode expression) {
  instance.visit(state, expression, new Stack<EvalNode>());
  for (Column referredColumn : EvalTreeUtil.findUniqueColumns(expression)) {
    if (currentNode.getInSchema().contains(referredColumn)) {
      continue;
    }
    throw new TajoInternalError("Invalid State: " + referredColumn
        + " cannot be accessible at Node (" + currentNode.getPID() + ")");
  }
  return state;
}
/**
 * Chooses the tuple converter matching the table's data format.
 *
 * @return a DRAW or RAW converter depending on {@code meta.getDataFormat()}
 * @throws TajoInternalError wrapping an {@link UnsupportedException} for any other format
 */
public TupleConverter initConverter() {
  String dataFormat = meta.getDataFormat();
  if (dataFormat.equals(BuiltinStorages.DRAW)) {
    return getDrawConverter();
  } else if (dataFormat.equals(BuiltinStorages.RAW)) {
    return getRawConverter();
  } else {
    throw new TajoInternalError(new UnsupportedException());
  }
}
/**
 * Dispatches fetch scheduling for a non-leaf stage according to the shuffle type of its
 * first incoming channel.
 *
 * @param schedulerContext the scheduling context
 * @param masterPlan       the master plan providing the incoming channels
 * @param stage            the stage whose fetches are scheduled
 * @param maxNum           the maximum number of tasks/fetches to schedule
 * @throws IOException        propagated from the underlying schedulers
 * @throws TajoInternalError  if the channel is neither hash- nor range-shuffled
 */
public static void scheduleFragmentsForNonLeafTasks(TaskSchedulerContext schedulerContext,
    MasterPlan masterPlan, Stage stage, int maxNum) throws IOException {
  DataChannel channel = masterPlan.getIncomingChannels(stage.getBlock().getId()).get(0);
  if (channel.isHashShuffle()) {
    scheduleHashShuffledFetches(schedulerContext, masterPlan, stage, channel, maxNum);
    return;
  }
  if (channel.isRangeShuffle()) {
    scheduleRangeShuffledFetches(schedulerContext, masterPlan, stage, channel, maxNum);
    return;
  }
  throw new TajoInternalError("Cannot support partition type");
}
/**
 * Creates a sort-based intersect operator over two sorted children.
 *
 * @param context    the task attempt context
 * @param left       the left (outer) child; its schema becomes the output schema
 * @param right      the right (inner) child
 * @param isDistinct whether duplicate tuples are eliminated (INTERSECT vs INTERSECT ALL)
 * @throws TajoInternalError if the two child schemas' column types do not match
 */
public SortIntersectExec(TaskAttemptContext context, PhysicalExec left, PhysicalExec right,
    boolean isDistinct) {
  super(context, left.getSchema(), right.getSchema(), left, right);
  List<TajoDataTypes.DataType> leftTypes = Arrays.asList(SchemaUtil.toDataTypes(left.getSchema()));
  List<TajoDataTypes.DataType> rightTypes = Arrays.asList(SchemaUtil.toDataTypes(right.getSchema()));
  if (!CatalogUtil.isMatchedFunction(leftTypes, rightTypes)) {
    throw new TajoInternalError("the both schemas are not compatible");
  }
  comparator = new SetTupleComparator(left.getSchema(), right.getSchema());
  this.isDistinct = isDistinct;
}